index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
20,193
|
DevinDeSilva/BookLibrary
|
refs/heads/main
|
/Library/forms/DeleteBookForm.py
|
from django import forms
class DeleteBook(forms.Form):
    """Form used to delete a book, identified by its (required) title."""
    # max_length matches RegisterBook.title so titles compare like-for-like
    title = forms.CharField(label='Title', max_length=30, required=True)
|
{"/Library/views.py": ["/Library/forms/RegisterBookForm.py", "/Library/forms/DeleteBookForm.py"]}
|
20,194
|
DevinDeSilva/BookLibrary
|
refs/heads/main
|
/Library/views.py
|
from django.http import HttpResponseRedirect
from django.shortcuts import redirect
from django.shortcuts import render
from . import models
from .forms.RegisterBookForm import RegisterBook
from .forms.DeleteBookForm import DeleteBook
# Create your views here.
def HomePage(request):
    """Render the home page with the book list.

    Reads optional ``search_str`` / ``search_by`` query parameters and
    passes them to ``models.getBooks``. Any failure falls back to an
    empty listing rather than a 500.
    """
    try:
        # Fixed typo: 'searh_by' -> 'search_by'
        search_string = request.GET.get('search_str', None)
        search_by = request.GET.get('search_by', None)
        book_list = models.getBooks(search_string, search_by)
        print(search_by, search_string)
        return render(request, 'HomePage.html', {
            "book_list": book_list
        })
    except Exception as e:
        # The original had separate IndexError/Exception handlers with
        # byte-identical bodies; merged into one without behavior change.
        print(str(e))
        return render(request, 'HomePage.html', {
            "book_list": []
        })
def addBookPage(request):
    """Render the static add-book page template."""
    return render(request, 'addBook.html')
def deleteBookPage(request):
    """Render the static delete-book page template."""
    return render(request, 'deleteBook.html')
def deleteBook(request):
    """Handle book deletion.

    POST: validate the DeleteBook form, delete the book by title and
    redirect home with a success message. GET: render the deletion form.
    Any error redirects back to /delete with the error in the query string.
    """
    try:
        if request.method == 'POST':
            form = DeleteBook(request.POST)
            if not form.is_valid():
                raise Exception("data is invalid")
            models.deleteBook(request.POST['title'])
            return HttpResponseRedirect('/?success=Successfully book deleted')
        form = DeleteBook()
        return render(request, 'deleteBook.html', {'form': form})
    except Exception as e:
        print(str(e))
        # Fixed: removed the stray '"' that produced a malformed query
        # string. NOTE(review): the message is still not URL-encoded.
        return HttpResponseRedirect(f'/delete?error=error while deleting a book:{str(e)}')
def addBook(request):
    """Handle book registration.

    POST: validate the RegisterBook form, persist the book via
    ``models.addBook`` and redirect home with a success message.
    GET: render the registration form. Any error redirects back to /add
    with the error in the query string.
    """
    try:
        if request.method == 'POST':
            form = RegisterBook(request.POST)
            if not form.is_valid():
                raise Exception("data is invalid")
            models.addBook(request.POST['title'],
                           request.POST['author'],
                           request.POST['genre'],
                           request.POST['height'],
                           request.POST['publisher'])
            return HttpResponseRedirect('/?success=Successfully data added')
        form = RegisterBook()
        return render(request, 'addBook.html', {'form': form})
    except Exception as e:
        print(str(e))
        # Fixed: removed the stray '"' that produced a malformed query
        # string. NOTE(review): the message is still not URL-encoded.
        return redirect(f'/add?error=error while adding a book:{str(e)}')
|
{"/Library/views.py": ["/Library/forms/RegisterBookForm.py", "/Library/forms/DeleteBookForm.py"]}
|
20,195
|
DevinDeSilva/BookLibrary
|
refs/heads/main
|
/Library/urls.py
|
from django.urls import path
from . import views
# URL routes for the Library app.
urlpatterns = [
    # Home page: book listing with optional search parameters
    path('', views.HomePage),
    # Add a new book (GET renders the form, POST submits it)
    path('add', views.addBook),
    # Delete a book by title (GET renders the form, POST submits it)
    path('delete', views.deleteBook),
]
|
{"/Library/views.py": ["/Library/forms/RegisterBookForm.py", "/Library/forms/DeleteBookForm.py"]}
|
20,196
|
DevinDeSilva/BookLibrary
|
refs/heads/main
|
/Library/forms/RegisterBookForm.py
|
from django import forms
class RegisterBook(forms.Form):
    """Form used to register a new book; every field is required."""
    title = forms.CharField(label='Title', max_length=30, required=True)
    author = forms.CharField(label='Author', max_length=30, required=True)
    genre = forms.CharField(label='Genre', max_length=30, required=True)
    # presumably the book's physical height; verify against models.addBook
    height = forms.IntegerField(label='Height', max_value=10000, required=True)
    publisher = forms.CharField(label='Publisher', max_length=50, required=True)
|
{"/Library/views.py": ["/Library/forms/RegisterBookForm.py", "/Library/forms/DeleteBookForm.py"]}
|
20,198
|
matthewcarbone/pyroVED
|
refs/heads/main
|
/pyroved/nets/conv.py
|
"""
conv.py
=========
Convolutional NN modules and custom blocks
Created by Maxim Ziatdinov (email: ziatdinovmax@gmail.com)
"""
from typing import Union, Tuple, List
import torch
import torch.nn as nn
import torch.nn.functional as F
from ..utils import get_activation, get_bnorm, get_conv, get_maxpool
from warnings import warn, filterwarnings
filterwarnings("ignore", module="torch.nn.functional")
tt = torch.tensor
class convEncoderNet(nn.Module):
    """
    Standard convolutional encoder: a stack of conv blocks followed by a
    fully-connected map to latent space, returning (mu, sigma) of q(z|x).

    Args:
        input_dim: Spatial dimensions of the input, (h, w) or (length,).
        input_channels: Number of channels in the input data.
        latent_dim: Dimensionality of the latent space.
        layers_per_block: Conv layers per block (defaults to [1, 2, 2]).
        hidden_dim: Base number of filters (grows per block).
        batchnorm: Use batch normalization after each conv layer.
        activation: Nonlinearity used in the conv blocks.
        softplus_out: Apply Softplus to sigma so it stays positive.
        pool: Max-pool after each block (halves spatial dims).
    """
    def __init__(self,
                 input_dim: Tuple[int],
                 input_channels: int = 1,
                 latent_dim: int = 2,
                 layers_per_block: List[int] = None,
                 hidden_dim: int = 32,
                 batchnorm: bool = True,
                 activation: str = "lrelu",
                 softplus_out: bool = True,
                 pool: bool = True,
                 ) -> None:
        """
        Initializes encoder module
        """
        super(convEncoderNet, self).__init__()
        if layers_per_block is None:
            layers_per_block = [1, 2, 2]
        # Each pooled block halves every spatial dimension
        output_dim = (tt(input_dim) // 2**len(layers_per_block)).tolist()
        output_channels = hidden_dim * len(layers_per_block)
        self.latent_dim = latent_dim
        self.feature_extractor = FeatureExtractor(
            len(input_dim), input_channels, layers_per_block, hidden_dim,
            batchnorm, activation, pool)
        # 2*latent_dim: concatenated mu and sigma heads
        self.features2latent = features_to_latent(
            [output_channels, *output_dim], 2*latent_dim)
        self.activation_out = nn.Softplus() if softplus_out else lambda x: x

    def forward(self, x: torch.Tensor) -> Tuple[torch.Tensor]:
        """
        Forward pass: returns (mu, sigma) for the latent distribution.
        """
        x = self.feature_extractor(x)
        encoded = self.features2latent(x)
        # Split the 2*latent_dim vector into mean and scale halves
        mu, sigma = encoded.split(self.latent_dim, 1)
        sigma = self.activation_out(sigma)
        return mu, sigma
class convDecoderNet(nn.Module):
    """
    Standard convolutional decoder: a fully-connected map from latent
    space into a feature volume, followed by a convolutional upsampler.

    Args:
        latent_dim: Dimensionality of the latent space.
        output_dim: Spatial dimensions of the output, (h, w) or (length,).
        output_channels: Number of channels of the output data.
        layers_per_block: Conv layers per block (defaults to [2, 2, 1]).
        hidden_dim: Number of filters entering the upsampler.
        batchnorm: Use batch normalization in the conv blocks.
        activation: Nonlinearity used in the conv blocks.
        sigmoid_out: Apply Sigmoid to the output (e.g. for [0, 1] data).
        upsampling_mode: 'bilinear' or 'nearest' interpolation.
    """
    def __init__(self,
                 latent_dim: int,
                 output_dim: Tuple[int],
                 output_channels: int = 1,
                 layers_per_block: List[int] = None,
                 hidden_dim: int = 96,
                 batchnorm: bool = True,
                 activation: str = "lrelu",
                 sigmoid_out: bool = True,
                 upsampling_mode: str = "bilinear",
                 ) -> None:
        """
        Initializes decoder module
        """
        super(convDecoderNet, self).__init__()
        if layers_per_block is None:
            layers_per_block = [2, 2, 1]
        # Each upsampling block doubles spatial dims, so start smaller
        input_dim = (tt(output_dim) // 2**len(layers_per_block)).tolist()
        self.latent2features = latent_to_features(
            latent_dim, [hidden_dim, *input_dim])
        self.upsampler = Upsampler(
            len(output_dim), hidden_dim, layers_per_block, output_channels,
            batchnorm, activation, upsampling_mode)
        self.activation_out = nn.Sigmoid() if sigmoid_out else lambda x: x

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """
        Forward pass: maps a latent vector to a decoded output volume.
        """
        x = self.latent2features(x)
        x = self.activation_out(self.upsampler(x))
        return x
class ConvBlock(nn.Module):
    """
    Creates a block of layers each consisting of convolution operation,
    (optional) nonlinear activation and (optional) batch normalization.

    Args:
        ndim: Data dimensionality (1, 2 or 3); selects Conv1d/2d/3d etc.
        nlayers: Number of convolutional layers in the block.
        input_channels: Channels of the incoming tensor.
        output_channels: Channels produced by every layer of the block.
        kernel_size: Convolution kernel size.
        stride: Convolution stride.
        padding: Convolution padding.
        batchnorm: Append batch-norm after each convolution.
        activation: Activation name resolved via get_activation
            (None disables the nonlinearity).
        pool: Append a single 2x2 max-pool at the end of the block.
    """
    def __init__(self,
                 ndim: int,
                 nlayers: int,
                 input_channels: int,
                 output_channels: int,
                 kernel_size: Union[Tuple[int], int] = 3,
                 stride: Union[Tuple[int], int] = 1,
                 padding: Union[Tuple[int], int] = 1,
                 batchnorm: bool = False,
                 activation: str = "lrelu",
                 pool: bool = False,
                 ) -> None:
        """
        Initializes module parameters
        """
        super(ConvBlock, self).__init__()
        if not 0 < ndim < 4:
            raise AssertionError("ndim must be equal to 1, 2 or 3")
        activation = get_activation(activation)
        block = []
        for i in range(nlayers):
            # Only the first layer consumes the incoming channel count
            input_channels = output_channels if i > 0 else input_channels
            block.append(get_conv(ndim)(input_channels, output_channels,
                         kernel_size=kernel_size, stride=stride, padding=padding))
            if activation is not None:
                block.append(activation())
            if batchnorm:
                block.append(get_bnorm(ndim)(output_channels))
        if pool:
            # A single pooling layer after all conv layers of the block
            block.append(get_maxpool(ndim)(2, 2))
        self.block = nn.Sequential(*block)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """
        Defines a forward pass
        """
        output = self.block(x)
        return output
class UpsampleBlock(nn.Module):
    """
    Upsampling performed using bilinear or nearest-neighbor interpolation
    followed by 1-by-1 convolution, which can be used to reduce a number of
    feature channels.

    Args:
        ndim: Data dimensionality (1, 2 or 3).
        input_channels: Channels of the incoming feature map.
        output_channels: Channels after the 1x1 convolution.
        scale_factor: Spatial upscaling factor for interpolation.
        mode: 'bilinear' or 'nearest' interpolation mode.
    """
    def __init__(self,
                 ndim: int,
                 input_channels: int,
                 output_channels: int,
                 scale_factor: int = 2,
                 mode: str = "bilinear") -> None:
        """
        Initializes module parameters
        """
        super(UpsampleBlock, self).__init__()
        warn_msg = ("'bilinear' mode is not supported for 1D and 3D;" +
                    " switching to 'nearest' mode")
        if mode not in ("bilinear", "nearest"):
            raise NotImplementedError(
                "Use 'bilinear' or 'nearest' for upsampling mode")
        if not 0 < ndim < 4:
            raise AssertionError("ndim must be equal to 1, 2 or 3")
        if mode == "bilinear" and ndim in (3, 1):
            # bilinear interpolation only applies to 2D (4D tensors);
            # silently fall back to nearest for 1D/3D with a warning
            warn(warn_msg, category=UserWarning)
            mode = "nearest"
        self.mode = mode
        self.scale_factor = scale_factor
        # 1x1 convolution changes channel count without touching spatial dims
        self.conv = get_conv(ndim)(
            input_channels, output_channels,
            kernel_size=1, stride=1, padding=0)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """
        Defines a forward pass
        """
        x = F.interpolate(
            x, scale_factor=self.scale_factor, mode=self.mode)
        return self.conv(x)
class FeatureExtractor(nn.Sequential):
    """
    Convolutional feature extractor: a sequence of ConvBlocks whose filter
    count grows linearly (nfilters, 2*nfilters, ...) block by block.

    Args:
        ndim: Data dimensionality (1, 2 or 3).
        input_channels: Channels of the input data.
        layers_per_block: Conv layers per block (defaults to [1, 2, 2]).
        nfilters: Base filter count; block i outputs nfilters*(i+1).
        batchnorm: Use batch normalization inside each block.
        activation: Activation name for the conv layers.
        pool: Max-pool at the end of every block (halves spatial dims).
    """
    def __init__(self,
                 ndim: int,
                 input_channels: int = 1,
                 layers_per_block: List[int] = None,
                 nfilters: int = 32,
                 batchnorm: bool = True,
                 activation: str = "lrelu",
                 pool: bool = True,
                 ) -> None:
        """
        Initializes feature extractor module
        """
        super(FeatureExtractor, self).__init__()
        if layers_per_block is None:
            layers_per_block = [1, 2, 2]
        for i, layers in enumerate(layers_per_block):
            # First block consumes the raw input channels
            in_filters = input_channels if i == 0 else nfilters * i
            block = ConvBlock(ndim, layers, in_filters, nfilters * (i+1),
                              batchnorm=batchnorm, activation=activation,
                              pool=pool)
            self.add_module("c{}".format(i), block)
class Upsampler(nn.Sequential):
    """
    Convolutional upsampler: alternates ConvBlocks (with shrinking filter
    counts) and UpsampleBlocks (each doubling spatial dims), closed by a
    kernel-1 output ConvBlock with no activation.

    Args:
        ndim: Data dimensionality (1, 2 or 3).
        input_channels: Channels of the incoming feature map.
        layers_per_block: Conv layers per block (defaults to [2, 2, 1]).
        output_channels: Channels of the final output.
        batchnorm: Use batch normalization inside the conv blocks.
        activation: Activation name for the conv blocks.
        upsampling_mode: 'bilinear' or 'nearest' interpolation.
    """
    def __init__(self,
                 ndim: int,
                 input_channels: int = 96,
                 layers_per_block: List[int] = None,
                 output_channels: int = 1,
                 batchnorm: bool = True,
                 activation: str = "lrelu",
                 upsampling_mode: str = "bilinear",
                 ) -> None:
        """
        Initializes upsampler module
        """
        super(Upsampler, self).__init__()
        if layers_per_block is None:
            layers_per_block = [2, 2, 1]
        nfilters = input_channels
        for i, layers in enumerate(layers_per_block):
            # Channel schedule shrinks: nfilters, nfilters//1, nfilters//2, ...
            in_filters = nfilters if i == 0 else nfilters // i
            block = ConvBlock(ndim, layers, in_filters, nfilters // (i+1),
                              batchnorm=batchnorm, activation=activation,
                              pool=False)
            self.add_module("conv_block_{}".format(i), block)
            up = UpsampleBlock(ndim, nfilters // (i+1), nfilters // (i+1),
                               mode=upsampling_mode)
            self.add_module("up_{}".format(i), up)
        # Final projection to output_channels (kernel 1, stride 1, pad 0,
        # no activation); reuses the last loop value of i on purpose
        out = ConvBlock(ndim, 1, nfilters // (i+1), output_channels,
                        1, 1, 0, activation=None)
        self.add_module("output_layer", out)
class features_to_latent(nn.Module):
    """
    Maps features (usually, from a convolutional net/layer) to latent space
    by flattening and applying a single linear layer.

    Args:
        input_dim: Feature shape (channels, *spatial) to be flattened.
        latent_dim: Dimensionality of the latent output.
    """
    def __init__(self, input_dim: Tuple[int], latent_dim: int = 2) -> None:
        super(features_to_latent, self).__init__()
        # .item() yields a plain int (consistent with latent_to_features)
        # instead of a 0-dim tensor before it is used as a layer size
        self.reshape_ = torch.prod(torch.tensor(input_dim)).item()
        self.fc_latent = nn.Linear(self.reshape_, latent_dim)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """Flattens x to (batch, prod(input_dim)) and projects to latent_dim."""
        x = x.view(-1, self.reshape_)
        return self.fc_latent(x)
class latent_to_features(nn.Module):
    """
    Maps a latent vector back into feature space: one linear layer
    followed by reshaping to the requested feature dimensions.
    """
    def __init__(self, latent_dim: int, out_dim: Tuple[int]) -> None:
        super(latent_to_features, self).__init__()
        self.reshape_ = out_dim
        n_features = torch.prod(torch.tensor(out_dim)).item()
        self.fc = nn.Linear(latent_dim, n_features)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """Projects x and unflattens the result to (batch, *out_dim)."""
        return self.fc(x).view(-1, *self.reshape_)
|
{"/pyroved/models/__init__.py": ["/pyroved/models/ivae.py", "/pyroved/models/ssivae.py", "/pyroved/models/ss_reg_ivae.py", "/pyroved/models/jivae.py", "/pyroved/models/ved.py"], "/pyroved/models/jivae.py": ["/pyroved/nets/__init__.py", "/pyroved/models/base.py"], "/pyroved/models/ved.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/nets/__init__.py": ["/pyroved/nets/conv.py", "/pyroved/nets/fc.py"], "/pyroved/models/ivae.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/models/ssivae.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/models/ss_reg_ivae.py": ["/pyroved/nets/__init__.py", "/pyroved/models/base.py"]}
|
20,199
|
matthewcarbone/pyroVED
|
refs/heads/main
|
/pyroved/models/__init__.py
|
"""
Variational autoencoder and encoder-decoder models
"""
from .ivae import iVAE
from .ssivae import ssiVAE
from .ss_reg_ivae import ss_reg_iVAE
from .jivae import jiVAE
from .ved import VED
# Public API of the models subpackage (mirrors the imports above)
__all__ = ['iVAE', 'jiVAE', 'ssiVAE', 'ss_reg_iVAE', 'VED']
|
{"/pyroved/models/__init__.py": ["/pyroved/models/ivae.py", "/pyroved/models/ssivae.py", "/pyroved/models/ss_reg_ivae.py", "/pyroved/models/jivae.py", "/pyroved/models/ved.py"], "/pyroved/models/jivae.py": ["/pyroved/nets/__init__.py", "/pyroved/models/base.py"], "/pyroved/models/ved.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/nets/__init__.py": ["/pyroved/nets/conv.py", "/pyroved/nets/fc.py"], "/pyroved/models/ivae.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/models/ssivae.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/models/ss_reg_ivae.py": ["/pyroved/nets/__init__.py", "/pyroved/models/base.py"]}
|
20,200
|
matthewcarbone/pyroVED
|
refs/heads/main
|
/pyroved/__version__.py
|
# Single-source package version string (PEP 8 spacing around '=')
version = '0.2.3'
|
{"/pyroved/models/__init__.py": ["/pyroved/models/ivae.py", "/pyroved/models/ssivae.py", "/pyroved/models/ss_reg_ivae.py", "/pyroved/models/jivae.py", "/pyroved/models/ved.py"], "/pyroved/models/jivae.py": ["/pyroved/nets/__init__.py", "/pyroved/models/base.py"], "/pyroved/models/ved.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/nets/__init__.py": ["/pyroved/nets/conv.py", "/pyroved/nets/fc.py"], "/pyroved/models/ivae.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/models/ssivae.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/models/ss_reg_ivae.py": ["/pyroved/nets/__init__.py", "/pyroved/models/base.py"]}
|
20,201
|
matthewcarbone/pyroVED
|
refs/heads/main
|
/pyroved/trainers/auxsvi.py
|
from typing import Type, Optional, Union, Dict
from collections import OrderedDict
from copy import deepcopy as dc
import torch
import torch.nn as nn
import pyro
import pyro.infer as infer
import pyro.optim as optim
from ..utils import set_deterministic_mode, average_weights
class auxSVItrainer:
    """
    Stochastic variational inference (SVI) trainer for variational models
    with auxiliary losses.

    Args:
        model:
            Initialized model. Must be a subclass of torch.nn.Module
            and have self.model and self.guide methods
        task:
            'classification' (default) or 'regression'; selects the ELBO
            (enumerated vs plain) and the evaluation metric.
        optimizer:
            Pyro optimizer (Defaults to Adam with learning rate 5e-4)
        seed:
            Enforces reproducibility
    Keyword Args:
        lr: learning rate (Default: 5e-4)
        device:
            Sets device to which model and data will be moved.
            Defaults to 'cuda:0' if a GPU is available and to CPU otherwise.
    Examples:
        >>> # Initialize model for semi supervised learning
        >>> data_dim = (28, 28)
        >>> ssvae = pyroved.models.ssiVAE(data_dim, latent_dim=2, num_classes=10, invariances=['r'])
        >>> # Initialize SVI trainer for models with auxiliary loss terms
        >>> trainer = auxSVItrainer(ssvae)
        >>> # Train for 200 epochs:
        >>> for _ in range(200):
        >>>     trainer.step(loader_unsuperv, loader_superv, loader_valid)
        >>>     trainer.print_statistics()
    """
    def __init__(self,
                 model: Type[nn.Module],
                 task: str = "classification",
                 optimizer: Type[optim.PyroOptim] = None,
                 seed: int = 1,
                 **kwargs: Union[str, float]
                 ) -> None:
        """
        Initializes trainer parameters
        """
        pyro.clear_param_store()
        set_deterministic_mode(seed)
        if task not in ["classification", "regression"]:
            raise ValueError("Choose between 'classification' and 'regression' tasks")
        self.task = task
        self.device = kwargs.get(
            "device", 'cuda' if torch.cuda.is_available() else 'cpu')
        if optimizer is None:
            lr = kwargs.get("lr", 5e-4)
            optimizer = optim.Adam({"lr": lr})
        if self.task == "classification":
            # Enumerate over the discrete class latent in parallel
            guide = infer.config_enumerate(
                model.guide, "parallel", expand=True)
            loss = pyro.infer.TraceEnum_ELBO(
                max_plate_nesting=1, strict_enumeration_warning=False)
        else:
            guide = model.guide
            loss = pyro.infer.Trace_ELBO()
        # Main ELBO objective and the auxiliary (classifier/regressor) one
        self.loss_basic = infer.SVI(
            model.model, guide, optimizer, loss=loss)
        self.loss_aux = infer.SVI(
            model.model_aux, model.guide_aux,
            optimizer, loss=pyro.infer.Trace_ELBO())
        self.model = model
        self.history = {"training_loss": [], "test": []}
        self.current_epoch = 0
        self.running_weights = {}

    def compute_loss(self,
                     xs: torch.Tensor,
                     ys: Optional[torch.Tensor] = None,
                     **kwargs: float) -> float:
        """
        Computes basic and auxiliary losses for a single batch and takes
        one optimization step on each. Returns the summed loss value.
        """
        xs = xs.to(self.device)
        if ys is not None:
            ys = ys.to(self.device)
        loss = self.loss_basic.step(xs, ys, **kwargs)
        loss_aux = self.loss_aux.step(xs, ys, **kwargs)
        return loss + loss_aux

    def train(self,
              loader_unsup: Type[torch.utils.data.DataLoader],
              loader_sup: Type[torch.utils.data.DataLoader],
              **kwargs: float
              ) -> float:
        """
        Trains a single epoch. Returns the per-sample loss over the
        unsupervised data (supervised loss steps are taken but not logged).
        """
        sup_batches = len(loader_sup)
        unsup_batches = len(loader_unsup)
        # Interleave one supervised batch every p unsupervised batches
        p = (sup_batches + unsup_batches) // sup_batches
        loader_sup = iter(loader_sup)
        epoch_loss = 0.
        unsup_count = 0
        for i, (xs,) in enumerate(loader_unsup):
            # Compute and store loss for unsupervised part
            epoch_loss += self.compute_loss(xs, **kwargs)
            unsup_count += xs.shape[0]
            if i % p == 1:
                # Bug fix: iterator.next() was removed in Python 3 -
                # use the builtin next() on the supervised iterator.
                xs, ys = next(loader_sup)
                # Compute supervised loss
                _ = self.compute_loss(xs, ys, **kwargs)
        return epoch_loss / unsup_count

    def evaluate(self,
                 loader_val: Optional[torch.utils.data.DataLoader]) -> float:
        """
        Evaluates model's current state on labeled test data
        """
        if self.task == "classification":
            return self.evaluate_cls(loader_val)
        return self.evaluate_reg(loader_val)

    def evaluate_cls(self,
                     loader_val: Optional[torch.utils.data.DataLoader]) -> float:
        """Returns classification accuracy on the validation loader."""
        correct, total = 0, 0
        with torch.no_grad():
            for data, labels in loader_val:
                predicted = self.model.classifier(data)
                # Labels are one-hot; compare against their argmax index
                _, lab_idx = torch.max(labels.cpu(), 1)
                correct += (predicted == lab_idx).sum().item()
                total += data.size(0)
        return correct / total

    def evaluate_reg(self,
                     loader_val: Optional[torch.utils.data.DataLoader]) -> float:
        """Returns the accumulated per-batch MSE on the validation loader.

        NOTE(review): the result is a sum over batches (a tensor), not an
        average - confirm whether mean MSE was intended.
        """
        correct = 0
        with torch.no_grad():
            for data, gt in loader_val:
                predicted = self.model.regressor(data)
                mse = nn.functional.mse_loss(predicted, gt)
                correct += mse
        return correct

    def step(self,
             loader_unsup: torch.utils.data.DataLoader,
             loader_sup: torch.utils.data.DataLoader,
             loader_val: Optional[torch.utils.data.DataLoader] = None,
             **kwargs: float
             ) -> None:
        """
        Single train (and evaluation, if any) step.
        Args:
            loader_unsup:
                Pytorch's dataloader with unlabeled training data
            loader_sup:
                Pytorch's dataloader with labeled training data
            loader_val:
                Pytorch's dataloader with validation data
            **scale_factor:
                Scale factor for KL divergence. See e.g. https://arxiv.org/abs/1804.03599
                Default value is 1 (i.e. no scaling)
            **aux_loss_multiplier:
                Hyperparameter that modulates the importance of the auxiliary loss
                term. See Eq. 9 in https://arxiv.org/abs/1406.5298. Default values is 20.
        """
        train_loss = self.train(loader_unsup, loader_sup, **kwargs)
        self.history["training_loss"].append(train_loss)
        if loader_val is not None:
            eval_acc = self.evaluate(loader_val)
            self.history["test"].append(eval_acc)
        self.current_epoch += 1

    def save_running_weights(self, net: str) -> None:
        """
        Saves the running weights of specified neural net (e.g. "encoder_y")
        Usually meant for a classifier neural network
        """
        net = getattr(self.model, net)
        state_dict_ = OrderedDict()
        for k, v in net.state_dict().items():
            # Deep-copy to CPU so later training does not mutate the snapshot
            state_dict_[k] = dc(v).cpu()
        self.running_weights[self.current_epoch] = state_dict_

    def average_weights(self,
                        net: str
                        ) -> Dict[int, Dict[str, torch.Tensor]]:
        """
        Updates the selected neural net with averaged weights taken over
        the snapshots collected by save_running_weights.
        """
        net = getattr(self.model, net)
        net.load_state_dict(average_weights(self.running_weights))

    def print_statistics(self) -> None:
        """
        Print training and test (if any) losses for current epoch
        """
        e = self.current_epoch
        if len(self.history["test"]) > 0:
            if self.task == "classification":
                template = 'Epoch: {} Training loss: {:.4f}, Test accuracy: {:.4f}'
            else:
                template = 'Epoch: {} Training loss: {:.4f}, Test MSE: {:.4f}'
            print(template.format(e, self.history["training_loss"][-1],
                                  self.history["test"][-1]))
        else:
            template = 'Epoch: {} Training loss: {:.4f}'
            print(template.format(e, self.history["training_loss"][-1]))
|
{"/pyroved/models/__init__.py": ["/pyroved/models/ivae.py", "/pyroved/models/ssivae.py", "/pyroved/models/ss_reg_ivae.py", "/pyroved/models/jivae.py", "/pyroved/models/ved.py"], "/pyroved/models/jivae.py": ["/pyroved/nets/__init__.py", "/pyroved/models/base.py"], "/pyroved/models/ved.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/nets/__init__.py": ["/pyroved/nets/conv.py", "/pyroved/nets/fc.py"], "/pyroved/models/ivae.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/models/ssivae.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/models/ss_reg_ivae.py": ["/pyroved/nets/__init__.py", "/pyroved/models/base.py"]}
|
20,202
|
matthewcarbone/pyroVED
|
refs/heads/main
|
/pyroved/models/base.py
|
"""
base.py
=========
Variational encoder-decoder base class
Created by Maxim Ziatdinov (email: ziatdinovmax@gmail.com)
"""
from typing import Tuple, Type, Union, List
from abc import abstractmethod
import torch
import torch.nn as nn
from ..utils import init_dataloader, transform_coordinates, generate_grid
tt = torch.tensor
class baseVAE(nn.Module):
    """Base class for regular and invariant variational encoder-decoder models.
    Args:
        data_dim:
            Dimensionality of the input data; use (height x width) for images
            or (length,) for spectra.
        invariances:
            List with invariances to enforce. For 2D systems, `r` enforces
            rotational invariance, `t` enforces invariance to
            translations, `sc` enforces a scale invariance, and
            invariances=None corresponds to vanilla VAE.
            For 1D systems, 't' enforces translational invariance and
            invariances=None is vanilla VAE
    Keyword Args:
        device:
            Sets device to which model and data will be moved.
            Defaults to 'cuda:0' if a GPU is available and to CPU otherwise.
        dx_prior:
            Translational prior in x direction (float between 0 and 1)
        dy_prior:
            Translational prior in y direction (float between 0 and 1)
        sc_prior:
            Scale prior (usually, sc_prior << 1)
    """
    def __init__(self, *args, **kwargs: str):
        super(baseVAE, self).__init__()
        data_dim, invariances = args
        # Set device
        self.device = kwargs.get(
            "device", 'cuda' if torch.cuda.is_available() else 'cpu')
        # Set dimensionality
        self.ndim = len(data_dim)
        # Set invariances to enforce (number and type); 'coord' counts the
        # extra latent dimensions consumed by the coordinate transforms
        if invariances is None:
            coord = 0
        else:
            coord = len(invariances)
            if self.ndim == 1:
                if coord > 1 or invariances[0] != 't':
                    raise ValueError(
                        "For 1D data, the only invariance to enforce "
                        "is translation ('t')")
            if 't' in invariances and self.ndim == 2:
                # 2D translation needs two latent dims (dx, dy)
                coord = coord + 1
        self.coord = coord
        self.invariances = invariances
        # Set coordinate grid
        if self.coord > 0:
            self.grid = generate_grid(data_dim).to(self.device)
        # Prior "belief" about the degree of translational disorder
        if self.coord > 0 and 't' in self.invariances:
            dx_pri = tt(kwargs.get("dx_prior", 0.1))
            # dy prior defaults to the x prior when not given
            dy_pri = kwargs.get("dy_prior", dx_pri.clone())
            self.t_prior = (tt([dx_pri, dy_pri]) if self.ndim == 2
                            else dx_pri).to(self.device)
        # Prior "belief" about the degree of scale disorder.
        # NOTE(review): the class docstring advertises 'sc' for scale
        # invariance, but membership here tests for 's' ('sc' in the list
        # would NOT match) - confirm the intended key.
        if self.coord > 0 and 's' in self.invariances:
            self.sc_prior = tt(kwargs.get("sc_prior", 0.1)).to(self.device)
        # Encoder and decoder (None by default; set by subclasses or setters)
        self.encoder_z = None
        self.decoder = None

    @abstractmethod
    def model(self, *args, **kwargs):
        """Pyro's model"""
        raise NotImplementedError

    @abstractmethod
    def guide(self, *args, **kwargs):
        """Pyro's guide"""
        raise NotImplementedError

    def _split_latent(self, z: torch.Tensor) -> Tuple[torch.Tensor]:
        """
        Split latent vector into parts associated with
        coordinate transformations and image content.
        Returns (phi, dx, sc, z_content).
        """
        # For 1D, there is only a translation
        if self.ndim == 1:
            dx = z[:, 0:1]
            z = z[:, 1:]
            return None, dx, None, z
        # Defaults: no rotation, no shift, unit scale
        phi = tt(0).to(self.device)
        dx = tt(0).to(self.device)
        sc = tt(1).to(self.device)
        if 'r' in self.invariances:
            phi = z[:, 0]
            z = z[:, 1:]
        if 't' in self.invariances:
            dx = z[:, :2]
            z = z[:, 2:]
        # NOTE(review): 's' vs documented 'sc' key - see __init__ note
        if 's' in self.invariances:
            sc = sc + self.sc_prior * z[:, 0]
            z = z[:, 1:]
        return phi, dx, sc, z

    def _encode(
        self,
        *input_args: Tuple[Union[torch.Tensor, List[torch.Tensor]]],
        **kwargs: int
    ) -> torch.Tensor:
        """Encodes data using a trained inference (encoder) network
        in a batch-by-batch fashion."""
        def inference(x: Tuple[torch.Tensor]) -> torch.Tensor:
            # Concatenate multi-part inputs along the last dim before encoding
            x = torch.cat(x, -1).to(self.device)
            with torch.no_grad():
                encoded = self.encoder_z(x)
            encoded = torch.cat(encoded, -1).cpu()
            return encoded

        loader = init_dataloader(*input_args, shuffle=False, **kwargs)
        z_encoded = []
        for x in loader:
            z_encoded.append(inference(x))
        return torch.cat(z_encoded)

    def _decode(self, z_new: torch.Tensor, **kwargs: int) -> torch.Tensor:
        """Decodes latent coordinates in a batch-by-batch fashion."""
        def generator(z: List[torch.Tensor]) -> torch.Tensor:
            with torch.no_grad():
                loc = self.decoder(*z)
            return loc.cpu()

        z_new = init_dataloader(z_new, shuffle=False, **kwargs)
        if self.invariances:
            # Build a single transformed coordinate grid shared by all batches
            grid = self.grid
            a = kwargs.get("angle", tt(0.)).to(self.device)
            t = kwargs.get("shift", tt(0.)).to(self.device)
            s = kwargs.get("scale", tt(1.)).to(self.device)
            grid = transform_coordinates(
                grid.unsqueeze(0), a.unsqueeze(0),
                t.unsqueeze(0), s.unsqueeze(0))
            grid = grid.squeeze()
        x_decoded = []
        for z in z_new:
            if self.invariances:
                # Prepend the (batch-expanded) grid to the decoder inputs
                z = [grid.expand(z[0].shape[0], *grid.shape)] + z
            x_decoded.append(generator(z))
        return torch.cat(x_decoded)

    def set_encoder(self, encoder_net: Type[torch.nn.Module]) -> None:
        """Sets a user-defined encoder neural network."""
        self.encoder_z = encoder_net.to(self.device)

    def set_decoder(self, decoder_net: Type[torch.nn.Module]) -> None:
        """Sets a user-defined decoder neural network."""
        self.decoder = decoder_net.to(self.device)

    def save_weights(self, filepath: str) -> None:
        """Saves trained weights of encoder(s) and decoder."""
        torch.save(self.state_dict(), filepath + '.pt')

    def load_weights(self, filepath: str) -> None:
        """Loads saved weights of encoder(s) and decoder."""
        weights = torch.load(filepath, map_location=self.device)
        self.load_state_dict(weights)
|
{"/pyroved/models/__init__.py": ["/pyroved/models/ivae.py", "/pyroved/models/ssivae.py", "/pyroved/models/ss_reg_ivae.py", "/pyroved/models/jivae.py", "/pyroved/models/ved.py"], "/pyroved/models/jivae.py": ["/pyroved/nets/__init__.py", "/pyroved/models/base.py"], "/pyroved/models/ved.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/nets/__init__.py": ["/pyroved/nets/conv.py", "/pyroved/nets/fc.py"], "/pyroved/models/ivae.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/models/ssivae.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/models/ss_reg_ivae.py": ["/pyroved/nets/__init__.py", "/pyroved/models/base.py"]}
|
20,203
|
matthewcarbone/pyroVED
|
refs/heads/main
|
/pyroved/models/jivae.py
|
"""
jivae.py
=========
Variational autoencoder for learning (jointly) discrete and
continuous latent representations of data with arbitrary affine transformations
(rotations, translations, and scale)
Created by Maxim Ziatdinov (email: ziatdinovmax@gmail.com)
"""
from typing import Tuple, Union, List
import pyro
import pyro.distributions as dist
import torch
from ..nets import fcDecoderNet, jfcEncoderNet, sDecoderNet
from ..utils import (generate_grid, generate_latent_grid,
generate_latent_grid_traversal, get_sampler,
plot_grid_traversal, plot_img_grid, plot_spect_grid,
set_deterministic_mode, to_onehot, transform_coordinates)
from .base import baseVAE
tt = torch.tensor
class jiVAE(baseVAE):
"""
Variational autoencoder for learning (jointly) discrete and
continuous latent representations of data while enforcing rotational,
translational, and scale invariances.
Args:
data_dim:
Dimensionality of the input data; (h x w) for images
or (length,) for spectra.
latent_dim:
Number of continuous latent dimensions.
discrete_dim:
Number of discrete latent dimensions.
invariances:
List with invariances to enforce. For 2D systems, `r` enforces
rotational invariance, `t` enforces invariance to
translations, `sc` enforces a scale invariance, and
invariances=None corresponds to vanilla VAE.
For 1D systems, 't' enforces translational invariance and
invariances=None is vanilla VAE
hidden_dim_e:
Number of hidden units per each layer in encoder (inference network).
hidden_dim_d:
Number of hidden units per each layer in decoder (generator network).
num_layers_e:
Number of layers in encoder (inference network).
num_layers_d:
Number of layers in decoder (generator network).
activation:
Non-linear activation for inner layers of encoder and decoder.
The available activations are ReLU ('relu'), leaky ReLU ('lrelu'),
hyperbolic tangent ('tanh'), softplus ('softplus'), and GELU ('gelu').
(The default is 'tanh').
sampler_d:
Decoder sampler, as defined as p(x|z) = sampler(decoder(z)).
The available samplers are 'bernoulli', 'continuous_bernoulli',
and 'gaussian' (Default: 'bernoulli').
sigmoid_d:
Sigmoid activation for the decoder output (Default: True).
seed:
Seed used in torch.manual_seed(seed) and
torch.cuda.manual_seed_all(seed).
Keyword Args:
device:
Sets device to which model and data will be moved.
Defaults to 'cuda:0' if a GPU is available and to CPU otherwise.
dx_prior:
Translational prior in x direction (float between 0 and 1)
dy_prior:
Translational prior in y direction (float between 0 and 1)
sc_prior:
Scale prior (usually, sc_prior << 1)
decoder_sig:
Sets sigma for a "gaussian" decoder sampler
Examples:
Initialize a joint VAE model with rotational invariance for 10 discrete classes
>>> data_dim = (28, 28)
>>> jrvae = jiVAE(data_dim, latent_dim=2, discrete_dim=10, invariances=['r'])
"""
    def __init__(self,
                 data_dim: Tuple[int],
                 latent_dim: int,
                 discrete_dim: int,
                 invariances: List[str] = None,
                 hidden_dim_e: int = 128,
                 hidden_dim_d: int = 128,
                 num_layers_e: int = 2,
                 num_layers_d: int = 2,
                 activation: str = "tanh",
                 sampler_d: str = "bernoulli",
                 sigmoid_d: bool = True,
                 seed: int = 1,
                 **kwargs: Union[str, float]
                 ) -> None:
        """
        Initializes j-iVAE's modules and parameters
        """
        args = (data_dim, invariances)
        super(jiVAE, self).__init__(*args, **kwargs)
        pyro.clear_param_store()
        set_deterministic_mode(seed)
        self.data_dim = data_dim
        # Initialize the Encoder NN; it parameterizes latent_dim + coord
        # continuous latents (the extra coord dims absorb the invariances)
        self.encoder_z = jfcEncoderNet(
            data_dim, latent_dim+self.coord, discrete_dim, hidden_dim_e,
            num_layers_e, activation, softplus_out=True)
        # Initialize the Decoder NN: spatial decoder only when coordinate
        # transforms are active
        dnet = sDecoderNet if 0 < self.coord < 5 else fcDecoderNet
        self.decoder = dnet(
            data_dim, latent_dim, discrete_dim, hidden_dim_d,
            num_layers_d, activation, sigmoid_out=sigmoid_d, unflat=False)
        # Initialize the decoder's sampler, i.e. p(x|z) = sampler(decoder(z))
        self.sampler_d = get_sampler(sampler_d, **kwargs)
        # Set continuous and discrete dimensions
        self.z_dim = latent_dim + self.coord
        self.discrete_dim = discrete_dim
        # Move model parameters to appropriate device
        self.to(self.device)
    def model(self,
              x: torch.Tensor,
              **kwargs: float) -> None:
        """
        Defines the model p(x|z,c)p(z)p(c)
        """
        # register PyTorch module `decoder` with Pyro
        pyro.module("decoder", self.decoder)
        # KLD scale factor (see e.g. https://openreview.net/pdf?id=Sy2fzU9gl);
        # a scalar is broadcast to [continuous beta, discrete beta]
        beta = kwargs.get("scale_factor", [1., 1.])
        if isinstance(beta, (float, int, list)):
            beta = torch.tensor(beta)
        if beta.ndim == 0:
            beta = torch.tensor([beta, beta])
        reshape_ = torch.prod(tt(x.shape[1:])).item()
        bdim = x.shape[0]
        with pyro.plate("data"):
            # sample the continuous latent vector from the constant prior distribution
            z_loc = x.new_zeros(torch.Size((bdim, self.z_dim)))
            z_scale = x.new_ones(torch.Size((bdim, self.z_dim)))
            # sample discrete latent vector from the constant (uniform) prior
            alpha = x.new_ones(torch.Size((bdim, self.discrete_dim))) / self.discrete_dim
            # sample from prior (value will be sampled by guide when computing ELBO)
            with pyro.poutine.scale(scale=beta[0]):
                z = pyro.sample("latent_cont", dist.Normal(z_loc, z_scale).to_event(1))
            with pyro.poutine.scale(scale=beta[1]):
                z_disc = pyro.sample("latent_disc", dist.OneHotCategorical(alpha))
            # split latent variable into parts for rotation and/or translation
            # and image content
            if self.coord > 0:
                # repeat along the class dimension so each enumerated class
                # shares the same continuous latents
                phi, dx, sc, z = self.split_latent(z.repeat(self.discrete_dim, 1))
                if 't' in self.invariances:
                    dx = (dx * self.t_prior).unsqueeze(1)
                # transform coordinate grid
                grid = self.grid.expand(bdim*self.discrete_dim, *self.grid.shape)
                x_coord_prime = transform_coordinates(grid, phi, dx, sc)
            # Continuous and discrete latent variables for the decoder
            z = [z, z_disc.reshape(-1, self.discrete_dim) if self.coord > 0 else z_disc]
            # decode the latent code z together with the transformed coordinates (if any)
            dec_args = (x_coord_prime, z) if self.coord else (z,)
            loc = self.decoder(*dec_args)
            # score against actual images/spectra
            loc = loc.view(*z_disc.shape[:-1], reshape_)
            pyro.sample(
                "obs", self.sampler_d(loc).to_event(1),
                obs=x.view(-1, reshape_))
def guide(self,
x: torch.Tensor,
**kwargs: float) -> None:
"""
Defines the guide q(z,c|x)
"""
# register PyTorch module `encoder_z` with Pyro
pyro.module("encoder_z", self.encoder_z)
# KLD scale factor (see e.g. https://openreview.net/pdf?id=Sy2fzU9gl)
beta = kwargs.get("scale_factor", [1., 1.])
if isinstance(beta, (float, int, list)):
beta = torch.tensor(beta)
if beta.ndim == 0:
beta = torch.tensor([beta, beta])
with pyro.plate("data"):
# use the encoder to get the parameters used to define q(z,c|x)
z_loc, z_scale, alpha = self.encoder_z(x)
# sample the latent code z
with pyro.poutine.scale(scale=beta[0]):
pyro.sample("latent_cont", dist.Normal(z_loc, z_scale).to_event(1))
with pyro.poutine.scale(scale=beta[1]):
pyro.sample("latent_disc", dist.OneHotCategorical(alpha))
    def split_latent(self, z: torch.Tensor) -> Tuple[torch.Tensor]:
        """
        Splits the latent variable into parts encoding rotation and/or
        translation (depending on self.invariances) and the part encoding
        image content. Delegates to the base-class helper.

        Args:
            z: Sampled continuous latent vector of shape (n, z_dim).
        """
        return self._split_latent(z)
def encode(self,
x_new: torch.Tensor,
logits: bool = False,
**kwargs: int) -> torch.Tensor:
"""
Encodes data using a trained inference (encoder) network
Args:
x_new:
Data to encode with a trained j-iVAE. The new data must have
the same dimensions (images height and width or spectra length)
as the one used for training.
logits:
Return raw class probabilities (Default: False).
kwargs:
Batch size as 'batch_size' (for encoding large volumes of data).
"""
z = self._encode(x_new)
z_loc = z[:, :self.z_dim]
z_scale = z[:, self.z_dim:2*self.z_dim]
classes = z[:, 2*self.z_dim:]
if not logits:
_, classes = torch.max(classes, 1)
return z_loc, z_scale, classes
def decode(self, z: torch.Tensor, y: torch.Tensor, **kwargs: int) -> torch.Tensor:
"""
Decodes a batch of latent coordinates
Args:
z: Latent coordinates (without rotational and translational parts)
y: Classes as one-hot vectors for each sample in z
"""
z = torch.cat([z.to(self.device), y.to(self.device)], -1)
loc = self._decode(z, **kwargs)
return loc.view(-1, *self.data_dim)
def manifold2d(self, d: int, disc_idx: int = 0, plot: bool = True,
**kwargs: Union[str, int, float]) -> torch.Tensor:
"""
Plots a learned latent manifold in the data space
Args:
d: Grid size
disc_idx: Discrete dimension for which we plot continuous latent manifolds
plot: Plots the generated manifold (Default: True)
kwargs: Keyword arguments include custom min/max values for grid
boundaries passed as 'z_coord' (e.g. z_coord = [-3, 3, -3, 3]),
'angle' and 'shift' to condition a generative model on,
and plot parameters ('padding', 'padding_value', 'cmap', 'origin', 'ylim')
"""
z, (grid_x, grid_y) = generate_latent_grid(d, **kwargs)
z_disc = to_onehot(tt(disc_idx).unsqueeze(0), self.discrete_dim)
z_disc = z_disc.repeat(z.shape[0], 1)
loc = self.decode(z, z_disc, **kwargs)
if plot:
if self.ndim == 2:
plot_img_grid(
loc, d,
extent=[grid_x.min(), grid_x.max(), grid_y.min(), grid_y.max()],
**kwargs)
elif self.ndim == 1:
plot_spect_grid(loc, d, **kwargs)
return loc
    def manifold_traversal(self, d: int, cont_idx: int, cont_idx_fixed: int = 0,
                           plot: bool = True, **kwargs: Union[str, int, float]
                           ) -> torch.Tensor:
        """
        Latent space traversal for joint continuous and discrete
        latent representations.

        Args:
            d: Grid size
            cont_idx:
                Continuous latent variable used for plotting
                a latent manifold traversal
            cont_idx_fixed:
                Value which the remaining continuous latent variables are fixed at
            plot:
                Plots the generated manifold (Default: True)
            kwargs:
                Keyword arguments include custom min/max values for grid
                boundaries passed as 'z_coord' (e.g. z_coord = [-3, 3, -3, 3]),
                'angle' and 'shift' to condition a generative model on,
                and plot parameters ('padding', 'padding_value', 'cmap', 'origin', 'ylim')
        """
        num_samples = d**2
        disc_dim = self.discrete_dim
        # Continuous dimensions excluding the invariance (rotation/translation) part
        cont_dim = self.z_dim - self.coord
        data_dim = self.data_dim
        # Get continuous and discrete latent coordinates
        samples_cont, samples_disc = generate_latent_grid_traversal(
            d, cont_dim, disc_dim, cont_idx, cont_idx_fixed, num_samples)
        # Pass discrete and continuous latent coordinates through a decoder
        decoded = self.decode(samples_cont, samples_disc, **kwargs)
        if plot:
            plot_grid_traversal(decoded, d, data_dim, disc_dim, **kwargs)
        return decoded
|
{"/pyroved/models/__init__.py": ["/pyroved/models/ivae.py", "/pyroved/models/ssivae.py", "/pyroved/models/ss_reg_ivae.py", "/pyroved/models/jivae.py", "/pyroved/models/ved.py"], "/pyroved/models/jivae.py": ["/pyroved/nets/__init__.py", "/pyroved/models/base.py"], "/pyroved/models/ved.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/nets/__init__.py": ["/pyroved/nets/conv.py", "/pyroved/nets/fc.py"], "/pyroved/models/ivae.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/models/ssivae.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/models/ss_reg_ivae.py": ["/pyroved/nets/__init__.py", "/pyroved/models/base.py"]}
|
20,204
|
matthewcarbone/pyroVED
|
refs/heads/main
|
/pyroved/models/ved.py
|
"""
ved.py
=========
Variational encoder-decoder model (input and output are different)
Created by Maxim Ziatdinov (email: ziatdinovmax@gmail.com)
"""
from typing import Tuple, Union, List
import pyro
import pyro.distributions as dist
import torch
from .base import baseVAE
from ..nets import convEncoderNet, convDecoderNet
from ..utils import (generate_latent_grid, get_sampler,
init_dataloader, plot_img_grid, plot_spect_grid,
set_deterministic_mode)
class VED(baseVAE):
    """
    Variational encoder-decoder model where the inputs and outputs are not identical.
    This model can be used for realizing im2spec and spec2im type of models where
    1D spectra are predicted from image data and vice versa.

    Args:
        input_dim:
            Dimensionality of the input data; use (h x w) for images
            or (length,) for spectra.
        output_dim:
            Dimensionality of the output data; use (h x w) for images
            or (length,) for spectra. Doesn't have to match the input data.
        input_channels:
            Number of input channels (Default: 1)
        output_channels:
            Number of output channels (Default: 1)
        latent_dim:
            Number of latent dimensions.
        hidden_dim_e:
            Number of hidden units (convolutional filters) for each layer in
            the first block of the encoder NN. The number of units in the
            consecutive blocks is defined as hidden_dim_e * n,
            where n = 2, 3, ..., n_blocks (Default: 32).
        hidden_dim_d:
            Number of hidden units (convolutional filters) for each layer in
            the first block of the decoder NN. The number of units in the
            consecutive blocks is defined as hidden_dim_e // n,
            where n = 2, 3, ..., n_blocks (Default: 96).
        num_layers_e:
            List with numbers of layers per each block of the encoder NN.
            Defaults to [1, 2, 2] if none is specified.
        num_layers_d:
            List with numbers of layers per each block of the decoder NN.
            Defaults to [2, 2, 1] if none is specified.
        activation:
            Non-linear activation for inner layers of encoder and decoder.
            The available activations are ReLU ('relu'), leaky ReLU ('lrelu'),
            hyperbolic tangent ('tanh'), softplus ('softplus'), and GELU ('gelu').
            (The default is 'lrelu').
        batchnorm:
            Batch normalization attached to each convolutional layer
            after non-linear activation (except for layers with 1x1 filters)
            in the encoder and decoder NNs (Default: False)
        sampler_d:
            Decoder sampler, as defined as p(x|z) = sampler(decoder(z)).
            The available samplers are 'bernoulli', 'continuous_bernoulli',
            and 'gaussian' (Default: 'bernoulli').
        sigmoid_d:
            Sigmoid activation for the decoder output (Default: True)
        seed:
            Seed used in torch.manual_seed(seed) and
            torch.cuda.manual_seed_all(seed)
        kwargs:
            Additional keyword argument is *decoder_sig* for setting sigma
            in the decoder's sampler when it is chosen to be a "gaussian".

    Examples:
        Initialize a VED model for predicting 1D spectra from 2D images

        >>> input_dim = (32, 32)  # image height and width
        >>> output_dim = (16,)  # spectrum length
        >>> ved = VED(input_dim, output_dim, latent_dim=2)
    """
    def __init__(self,
                 input_dim: Tuple[int],
                 output_dim: Tuple[int],
                 input_channels: int = 1,
                 output_channels: int = 1,
                 latent_dim: int = 2,
                 hidden_dim_e: int = 32,
                 hidden_dim_d: int = 96,
                 num_layers_e: List[int] = None,
                 num_layers_d: List[int] = None,
                 activation: str = "lrelu",
                 batchnorm: bool = False,
                 sampler_d: str = "bernoulli",
                 sigmoid_d: bool = True,
                 seed: int = 1,
                 **kwargs: float
                 ) -> None:
        """
        Initializes VED's modules and parameters
        """
        # The base VAE is initialized with the *output* dimensions, since
        # that is the space the generative part of the model operates in
        super(VED, self).__init__(output_dim, None, **kwargs)
        pyro.clear_param_store()
        set_deterministic_mode(seed)
        self.device = 'cuda' if torch.cuda.is_available() else 'cpu'
        # Number of output dimensions (1 for spectra, 2 for images)
        self.ndim = len(output_dim)
        # Convolutional encoder maps inputs to the latent distribution params
        self.encoder_z = convEncoderNet(
            input_dim, input_channels, latent_dim,
            num_layers_e, hidden_dim_e,
            batchnorm, activation)
        # Convolutional decoder maps latent codes to the *output* space
        self.decoder = convDecoderNet(
            latent_dim, output_dim, output_channels,
            num_layers_d, hidden_dim_d,
            batchnorm, activation, sigmoid_d)
        self.sampler_d = get_sampler(sampler_d, **kwargs)
        self.z_dim = latent_dim
        # Move model parameters to the selected device
        self.to(self.device)

    def model(self,
              x: torch.Tensor = None,
              y: torch.Tensor = None,
              **kwargs: float) -> None:
        """
        Defines the model p(y|z)p(z).

        NOTE(review): x and y default to None but are accessed
        unconditionally below, so both must be supplied during training.

        Keyword Args:
            scale_factor: KLD scale factor (beta).
        """
        # register PyTorch module `decoder` with Pyro
        pyro.module("decoder", self.decoder)
        # KLD scale factor (see e.g. https://openreview.net/pdf?id=Sy2fzU9gl)
        beta = kwargs.get("scale_factor", 1.)
        with pyro.plate("data", x.shape[0]):
            # setup hyperparameters for prior p(z)
            z_loc = x.new_zeros(torch.Size((x.shape[0], self.z_dim)))
            z_scale = x.new_ones(torch.Size((x.shape[0], self.z_dim)))
            # sample from prior (value will be sampled by guide when computing the ELBO)
            with pyro.poutine.scale(scale=beta):
                z = pyro.sample("z", dist.Normal(z_loc, z_scale).to_event(1))
            # decode the latent code z
            loc = self.decoder(z)
            # score against the actual *output* data (y), not the input
            pyro.sample(
                "obs", self.sampler_d(loc.flatten(1)).to_event(1),
                obs=y.flatten(1))

    def guide(self,
              x: torch.Tensor = None,
              y: torch.Tensor = None,
              **kwargs: float) -> None:
        """
        Defines the guide q(z|x). The y argument is unused here; it is kept
        to mirror the model's signature as required by Pyro's SVI.

        Keyword Args:
            scale_factor: KLD scale factor (beta).
        """
        # register PyTorch module `encoder_z` with Pyro
        pyro.module("encoder_z", self.encoder_z)
        # KLD scale factor (see e.g. https://openreview.net/pdf?id=Sy2fzU9gl)
        beta = kwargs.get("scale_factor", 1.)
        with pyro.plate("data", x.shape[0]):
            # use the encoder to get the parameters used to define q(z|x)
            z_loc, z_scale = self.encoder_z(x)
            # sample the latent code z
            with pyro.poutine.scale(scale=beta):
                pyro.sample("z", dist.Normal(z_loc, z_scale).to_event(1))

    def encode(self, x_new: torch.Tensor, **kwargs: int) -> torch.Tensor:
        """
        Encodes data using a trained inference (encoder) network.

        Args:
            x_new:
                Data to encode with a trained VED model. The new data must have
                the same dimensions (images height and width or spectra length)
                as the one used for training.
            kwargs:
                Batch size as 'batch_size' (for encoding large volumes of data)
        """
        self.eval()
        z = self._encode(x_new)
        # Encoder output layout: [z mean | z scale]
        z_loc, z_scale = z.split(self.z_dim, 1)
        return z_loc, z_scale

    def decode(self,
               z: torch.Tensor,
               **kwargs: int) -> torch.Tensor:
        """
        Decodes a batch of latent coordinates.

        Args:
            z: Latent coordinates
        """
        self.eval()
        z = z.to(self.device)
        loc = self._decode(z, **kwargs)
        return loc

    def predict(self, x_new: torch.Tensor, **kwargs: int) -> torch.Tensor:
        """
        Forward prediction (encode -> sample -> decode).

        Returns the per-sample predictive mean and standard deviation
        computed from 30 Monte Carlo samples of the latent posterior.

        Args:
            x_new: New input data.
        Keyword Args:
            batch_size: Batch size (for processing large volumes of data).
        """
        def forward_(x_i) -> torch.Tensor:
            # Encode one batch, draw 30 latent samples, and decode each
            with torch.no_grad():
                encoded = self.encoder_z(x_i)
                encoded = torch.cat(encoded, -1)
                z_mu, z_sig = encoded.split(self.z_dim, 1)
                z_samples = dist.Normal(z_mu, z_sig).rsample(sample_shape=(30,))
                y = torch.cat([self.decoder(z)[None] for z in z_samples])
            return y.mean(0).cpu(), y.std(0).cpu()

        x_new = init_dataloader(x_new, shuffle=False, **kwargs)
        prediction_mu, prediction_sd = [], []
        for (x_i,) in x_new:
            y_mu, y_sd = forward_(x_i.to(self.device))
            prediction_mu.append(y_mu)
            prediction_sd.append(y_sd)
        return torch.cat(prediction_mu), torch.cat(prediction_sd)

    def manifold2d(self, d: int, plot: bool = True,
                   **kwargs: Union[str, int]) -> torch.Tensor:
        """
        Plots a learned latent manifold in the image space.

        Args:
            d: Grid size
            plot: Plots the generated manifold (Default: True)
            kwargs: Keyword arguments include custom min/max values for grid
                boundaries passed as 'z_coord' (e.g. z_coord = [-3, 3, -3, 3])
                and plot parameters ('padding', 'padding_value', 'cmap', 'origin', 'ylim')
        """
        self.eval()
        z, (grid_x, grid_y) = generate_latent_grid(d, **kwargs)
        z = z.to(self.device)
        with torch.no_grad():
            loc = self.decoder(z).cpu()
        if plot:
            if self.ndim == 2:
                plot_img_grid(
                    loc, d,
                    extent=[grid_x.min(), grid_x.max(), grid_y.min(), grid_y.max()],
                    **kwargs)
            elif self.ndim == 1:
                plot_spect_grid(loc, d, **kwargs)
        return loc
|
{"/pyroved/models/__init__.py": ["/pyroved/models/ivae.py", "/pyroved/models/ssivae.py", "/pyroved/models/ss_reg_ivae.py", "/pyroved/models/jivae.py", "/pyroved/models/ved.py"], "/pyroved/models/jivae.py": ["/pyroved/nets/__init__.py", "/pyroved/models/base.py"], "/pyroved/models/ved.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/nets/__init__.py": ["/pyroved/nets/conv.py", "/pyroved/nets/fc.py"], "/pyroved/models/ivae.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/models/ssivae.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/models/ss_reg_ivae.py": ["/pyroved/nets/__init__.py", "/pyroved/models/base.py"]}
|
20,205
|
matthewcarbone/pyroVED
|
refs/heads/main
|
/pyroved/nets/__init__.py
|
"""
Fully-connected and convolutional neural network modules
"""
from .conv import (ConvBlock, FeatureExtractor, UpsampleBlock, Upsampler,
convDecoderNet, convEncoderNet)
from .fc import (fcClassifierNet, fcDecoderNet, fcEncoderNet, jfcEncoderNet,
sDecoderNet, fcRegressorNet)
__all__ = ["fcEncoderNet", "fcDecoderNet", "sDecoderNet", "fcRegressorNet",
"fcClassifierNet", "jfcEncoderNet", "ConvBlock", "UpsampleBlock",
"FeatureExtractor", "Upsampler", "convEncoderNet", "convDecoderNet"]
|
{"/pyroved/models/__init__.py": ["/pyroved/models/ivae.py", "/pyroved/models/ssivae.py", "/pyroved/models/ss_reg_ivae.py", "/pyroved/models/jivae.py", "/pyroved/models/ved.py"], "/pyroved/models/jivae.py": ["/pyroved/nets/__init__.py", "/pyroved/models/base.py"], "/pyroved/models/ved.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/nets/__init__.py": ["/pyroved/nets/conv.py", "/pyroved/nets/fc.py"], "/pyroved/models/ivae.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/models/ssivae.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/models/ss_reg_ivae.py": ["/pyroved/nets/__init__.py", "/pyroved/models/base.py"]}
|
20,206
|
matthewcarbone/pyroVED
|
refs/heads/main
|
/pyroved/nets/fc.py
|
"""
fc.py
Module for creating fully-connected encoder and decoder modules
Created by Maxim Ziatdinov (ziatdinovmax@gmail.com)
"""
from typing import List, Tuple, Type, Union
import torch
import torch.nn as nn
from pyro.distributions.util import broadcast_shape
from ..utils import get_activation
tt = torch.tensor
class Concat(nn.Module):
    """
    Concatenates a list of tensors along the last dimension.

    If the input is already a single tensor it is returned unchanged. When
    ``allow_broadcast`` is True, the batch dimensions of all inputs are
    broadcast to a common shape before concatenation.
    """
    def __init__(self, allow_broadcast: bool = True):
        """
        Initializes module

        Args:
            allow_broadcast: Broadcast batch dims of the inputs to a common
                shape before concatenating (Default: True).
        """
        # Call nn.Module's constructor *before* assigning attributes:
        # nn.Module.__setattr__ relies on internal registries created there.
        super().__init__()
        self.allow_broadcast = allow_broadcast

    def forward(self, input_args: Union[List[torch.Tensor], torch.Tensor]
                ) -> torch.Tensor:
        """
        Performs concatenation along the last dimension.
        """
        # A single tensor needs no concatenation
        if torch.is_tensor(input_args):
            return input_args
        if self.allow_broadcast:
            # Broadcast all batch dims to a common shape; keep last dim free
            shape = broadcast_shape(*[s.shape[:-1] for s in input_args]) + (-1,)
            input_args = [s.expand(shape) for s in input_args]
        return torch.cat(input_args, dim=-1)
class fcEncoderNet(nn.Module):
    """
    Standard fully-connected encoder network for a VAE.

    Maps (optionally flattened) inputs to the mean and standard deviation
    of the encoded latent distribution.
    """
    def __init__(self,
                 in_dim: Tuple[int],
                 latent_dim: int = 2,
                 c_dim: int = 0,
                 hidden_dim: int = 128,
                 num_layers: int = 2,
                 activation: str = 'tanh',
                 softplus_out: bool = True,
                 flat: bool = True
                 ) -> None:
        """
        Initializes the encoder modules.
        """
        super(fcEncoderNet, self).__init__()
        if len(in_dim) not in [1, 2, 3]:
            raise ValueError("in_dim must be (h, w), (h, w, c), or (l,)")
        # Flattened input size, extended by the conditioning dimension
        self.in_dim = torch.prod(tt(in_dim)).item() + c_dim
        self.flat = flat
        self.concat = Concat()
        self.fc_layers = make_fc_layers(
            self.in_dim, hidden_dim, num_layers, activation)
        # Two parallel heads: mean and (pre-activation) standard deviation
        self.fc11 = nn.Linear(hidden_dim, latent_dim)
        self.fc12 = nn.Linear(hidden_dim, latent_dim)
        # Softplus keeps the predicted scale strictly positive
        self.activation_out = nn.Softplus() if softplus_out else lambda x: x

    def forward(self, x: torch.Tensor) -> Tuple[torch.Tensor]:
        """
        Computes (mu, sigma) of the encoded distribution for a batch.
        """
        h = self.concat(x)
        if self.flat:
            h = h.view(-1, self.in_dim)
        h = self.fc_layers(h)
        mu = self.fc11(h)
        sigma = self.activation_out(self.fc12(h))
        return mu, sigma
class jfcEncoderNet(nn.Module):
    """
    Fully-connected encoder for a joint (continuous + discrete) VAE.

    Outputs the mean and standard deviation of the continuous latent
    distribution together with softmax class probabilities for the
    discrete latent variable.
    """
    def __init__(self,
                 in_dim: Tuple[int],
                 latent_dim: int = 2,
                 discrete_dim: int = 0,
                 hidden_dim: int = 128,
                 num_layers: int = 2,
                 activation: str = 'tanh',
                 softplus_out: bool = True,
                 flat: bool = True
                 ) -> None:
        """
        Initializes module

        Args:
            in_dim: Input dimensionality, (h, w), (h, w, c), or (l,).
            latent_dim: Number of continuous latent dimensions.
            discrete_dim: Number of discrete categories (one-hot size).
            hidden_dim: Hidden units per fully-connected layer.
            num_layers: Number of fully-connected layers.
            activation: Activation function name for inner layers.
            softplus_out: Apply Softplus to the predicted scale so it is
                strictly positive.
            flat: Flatten the input to (batch, in_dim) before the dense layers.
        """
        super(jfcEncoderNet, self).__init__()
        if len(in_dim) not in [1, 2, 3]:
            raise ValueError("in_dim must be (h, w), (h, w, c), or (l,)")
        self.in_dim = torch.prod(tt(in_dim)).item()
        self.flat = flat
        self.concat = Concat()
        self.fc_layers = make_fc_layers(
            self.in_dim, hidden_dim, num_layers, activation)
        # Continuous heads: mean and (pre-Softplus) standard deviation
        self.fc11 = nn.Linear(hidden_dim, latent_dim)
        self.fc12 = nn.Linear(hidden_dim, latent_dim)
        # Discrete head: unnormalized class scores
        self.fc13 = nn.Linear(hidden_dim, discrete_dim)
        self.activation_out = nn.Softplus() if softplus_out else lambda x: x

    def forward(self, x: torch.Tensor) -> Tuple[torch.Tensor]:
        """
        Returns (mu, sigma, alpha): posterior mean, scale, and softmax
        class probabilities.
        """
        x = self.concat(x)
        if self.flat:
            x = x.view(-1, self.in_dim)
        x = self.fc_layers(x)
        mu = self.fc11(x)
        sigma = self.activation_out(self.fc12(x))
        alpha = torch.softmax(self.fc13(x), dim=-1)
        return mu, sigma, alpha
class fcDecoderNet(nn.Module):
    """
    Standard fully-connected decoder for VAE.

    Maps latent coordinates (optionally concatenated with a conditioning
    vector) back to the data space.
    """
    def __init__(self,
                 out_dim: Tuple[int],
                 latent_dim: int,
                 c_dim: int = 0,
                 hidden_dim: int = 128,
                 num_layers: int = 2,
                 activation: str = 'tanh',
                 sigmoid_out: bool = True,
                 unflat: bool = True
                 ) -> None:
        """
        Initializes the decoder modules.

        Args:
            out_dim: Output dimensionality, (h, w), (h, w, c), or (l,).
            latent_dim: Number of latent dimensions.
            c_dim: Dimensionality of an optional conditioning vector.
            hidden_dim: Hidden units per fully-connected layer.
            num_layers: Number of fully-connected layers.
            activation: Activation function name for inner layers.
            sigmoid_out: Apply a Sigmoid to the decoder output.
            unflat: Reshape the flat output back to out_dim.

        Raises:
            ValueError: If out_dim is not 1-, 2-, or 3-dimensional.
        """
        super(fcDecoderNet, self).__init__()
        if len(out_dim) not in [1, 2, 3]:
            # Fixed: the validated argument is out_dim, not in_dim
            raise ValueError("out_dim must be (h, w), (h, w, c), or (l,)")
        self.unflat = unflat
        if self.unflat:
            # Remember the target shape to restore after the dense layers
            self.reshape = out_dim
        out_dim = torch.prod(tt(out_dim)).item()
        self.concat = Concat()
        self.fc_layers = make_fc_layers(
            latent_dim+c_dim, hidden_dim, num_layers, activation)
        self.out = nn.Linear(hidden_dim, out_dim)
        # Sigmoid maps outputs to (0, 1), e.g. for Bernoulli-type samplers
        self.activation_out = nn.Sigmoid() if sigmoid_out else lambda x: x

    def forward(self, z: torch.Tensor) -> torch.Tensor:
        """
        Decodes a batch of latent vectors into the data space.
        """
        z = self.concat(z)
        x = self.fc_layers(z)
        x = self.activation_out(self.out(x))
        if self.unflat:
            return x.view(-1, *self.reshape)
        return x
class sDecoderNet(nn.Module):
    """
    Spatial generator (decoder) network with fully-connected layers.

    Decodes a latent vector together with (transformed) pixel coordinates,
    which enables rotational/translational invariance in the generator.
    """
    def __init__(self,
                 out_dim: Tuple[int],
                 latent_dim: int,
                 c_dim: int = 0,
                 hidden_dim: int = 128,
                 num_layers: int = 2,
                 activation: str = 'tanh',
                 sigmoid_out: bool = True,
                 unflat: bool = True
                 ) -> None:
        """
        Initializes the decoder modules.

        Args:
            out_dim: Output dimensionality, (h, w), (h, w, c), or (l,).
            latent_dim: Number of latent dimensions.
            c_dim: Dimensionality of an optional conditioning vector.
            hidden_dim: Hidden units per fully-connected layer.
            num_layers: Number of fully-connected layers.
            activation: Activation function name for inner layers.
            sigmoid_out: Apply a Sigmoid to the decoder output.
            unflat: Reshape the flat output back to out_dim.

        Raises:
            ValueError: If out_dim is not 1-, 2-, or 3-dimensional.
        """
        super(sDecoderNet, self).__init__()
        if len(out_dim) not in [1, 2, 3]:
            # Fixed: the validated argument is out_dim, not in_dim
            raise ValueError("out_dim must be (h, w), (h, w, c), or (l,)")
        self.unflat = unflat
        if self.unflat:
            self.reshape = out_dim
        # 1D data needs a single coordinate per point, 2D data needs two
        coord_dim = 1 if len(out_dim) < 2 else 2
        self.concat = Concat()
        self.coord_latent = coord_latent(
            latent_dim+c_dim, hidden_dim, coord_dim)
        self.fc_layers = make_fc_layers(
            hidden_dim, hidden_dim, num_layers, activation)
        self.out = nn.Linear(hidden_dim, 1)  # need to generalize to multi-channel (c > 1)
        self.activation_out = nn.Sigmoid() if sigmoid_out else lambda x: x

    def forward(self, x_coord: torch.Tensor, z: torch.Tensor) -> torch.Tensor:
        """
        Decodes latent vectors conditioned on the coordinate grid.
        """
        z = self.concat(z)
        x = self.coord_latent(x_coord, z)
        x = self.fc_layers(x)
        x = self.activation_out(self.out(x))
        if self.unflat:
            return x.view(-1, *self.reshape)
        return x
class coord_latent(nn.Module):
    """
    The "spatial" part of the trVAE decoder that allows for translational
    and rotational invariance (based on https://arxiv.org/abs/1909.11663).

    Projects pixel coordinates and the latent vector into a common feature
    space and combines the two embeddings additively.
    """
    def __init__(self,
                 latent_dim: int,
                 out_dim: int,
                 ndim: int = 2,
                 activation_out: bool = True) -> None:
        """
        Initializes the coordinate and latent projection layers.
        """
        super(coord_latent, self).__init__()
        self.fc_coord = nn.Linear(ndim, out_dim)
        self.fc_latent = nn.Linear(latent_dim, out_dim, bias=False)
        self.activation = nn.Tanh() if activation_out else None

    def forward(self,
                x_coord: torch.Tensor,
                z: Tuple[torch.Tensor]) -> torch.Tensor:
        """
        Embeds coordinates and latent codes and sums the embeddings.
        """
        b, n = x_coord.size()[:2]
        # Embed every coordinate independently, then restore the batch axis
        coord_emb = self.fc_coord(x_coord.reshape(b * n, -1)).reshape(b, n, -1)
        # Embed the latent vector and broadcast it over all n coordinates
        latent_emb = self.fc_latent(z)
        latent_emb = latent_emb.view(-1, latent_emb.size(-1)).unsqueeze(1)
        combined = (coord_emb + latent_emb).reshape(b * n, -1)
        if self.activation is not None:
            combined = self.activation(combined)
        return combined
class fcClassifierNet(nn.Module):
    """
    Simple classification neural network built from fully-connected
    layers only; outputs softmax class probabilities.
    """
    def __init__(self,
                 in_dim: Tuple[int],
                 num_classes: int,
                 hidden_dim: int = 128,
                 num_layers: int = 2,
                 activation: str = 'tanh'
                 ) -> None:
        """
        Initializes the classifier modules.
        """
        super(fcClassifierNet, self).__init__()
        if len(in_dim) not in [1, 2, 3]:
            raise ValueError("in_dim must be (h, w), (h, w, c), or (l,)")
        # Total number of input features after flattening
        self.in_dim = torch.prod(tt(in_dim)).item()
        self.fc_layers = make_fc_layers(
            self.in_dim, hidden_dim, num_layers, activation)
        self.out = nn.Linear(hidden_dim, num_classes)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """
        Returns softmax class probabilities for a batch of inputs.
        """
        logits = self.out(self.fc_layers(x))
        return torch.softmax(logits, dim=-1)
class fcRegressorNet(nn.Module):
    """
    Simple regression neural network with fully-connected layers only.
    Maps inputs to a c_dim-dimensional continuous output.
    """
    def __init__(self,
                 in_dim: Tuple[int],
                 c_dim: int,
                 hidden_dim: int = 128,
                 num_layers: int = 2,
                 activation: str = 'tanh'
                 ) -> None:
        """
        Initializes module

        Args:
            in_dim: Input dimensionality, (h, w), (h, w, c), or (l,).
            c_dim: Dimensionality of the regression target.
            hidden_dim: Hidden units per fully-connected layer.
            num_layers: Number of fully-connected layers.
            activation: Activation function name for inner layers.
        """
        super(fcRegressorNet, self).__init__()
        if len(in_dim) not in [1, 2, 3]:
            raise ValueError("in_dim must be (h, w), (h, w, c), or (l,)")
        self.in_dim = torch.prod(tt(in_dim)).item()
        self.fc_layers = make_fc_layers(
            self.in_dim, hidden_dim, num_layers, activation)
        self.out = nn.Linear(hidden_dim, c_dim)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """
        Forward pass; returns raw (unbounded) regression outputs.
        """
        x = self.fc_layers(x)
        return self.out(x)
def make_fc_layers(in_dim: int,
                   hidden_dim: int = 128,
                   num_layers: int = 2,
                   activation: str = "tanh"
                   ) -> Type[nn.Module]:
    """
    Builds an nn.Sequential of stacked fully-connected (dense) layers,
    each followed by the chosen non-linear activation.
    """
    modules = []
    for layer_idx in range(num_layers):
        # First layer maps from the input size; the rest are hidden-to-hidden
        fan_in = in_dim if layer_idx == 0 else hidden_dim
        modules.append(nn.Linear(fan_in, hidden_dim))
        modules.append(get_activation(activation)())
    return nn.Sequential(*modules)
|
{"/pyroved/models/__init__.py": ["/pyroved/models/ivae.py", "/pyroved/models/ssivae.py", "/pyroved/models/ss_reg_ivae.py", "/pyroved/models/jivae.py", "/pyroved/models/ved.py"], "/pyroved/models/jivae.py": ["/pyroved/nets/__init__.py", "/pyroved/models/base.py"], "/pyroved/models/ved.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/nets/__init__.py": ["/pyroved/nets/conv.py", "/pyroved/nets/fc.py"], "/pyroved/models/ivae.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/models/ssivae.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/models/ss_reg_ivae.py": ["/pyroved/nets/__init__.py", "/pyroved/models/base.py"]}
|
20,207
|
matthewcarbone/pyroVED
|
refs/heads/main
|
/pyroved/utils/data.py
|
from typing import Tuple, Type
import torch
def init_dataloader(*args: torch.Tensor,
                    random_sampler: bool = False,
                    shuffle: bool = True,
                    **kwargs: int
                    ) -> Type[torch.utils.data.DataLoader]:
    """
    Returns an initialized PyTorch DataLoader, which is used by pyroVED's
    trainers. The positional inputs are torch Tensor objects containing
    training data and (optionally) labels.

    Example:

        >>> # Load training data stored as numpy array
        >>> train_data = np.load("my_training_data.npy")
        >>> # Transform numpy array to torch Tensor object
        >>> train_data = torch.from_numpy(train_data).float()
        >>> # Initialize dataloader
        >>> train_loader = init_dataloader(train_data)
    """
    batch_size = kwargs.get("batch_size", 100)
    dataset = torch.utils.data.dataset.TensorDataset(*args)
    if not random_sampler:
        # Plain (optionally shuffled) batching
        return torch.utils.data.DataLoader(
            dataset=dataset, batch_size=batch_size, shuffle=shuffle)
    # Random sampling without replacement; mutually exclusive with `shuffle`
    sampler = torch.utils.data.RandomSampler(dataset)
    return torch.utils.data.DataLoader(
        dataset=dataset, batch_size=batch_size, sampler=sampler)
def init_ssvae_dataloaders(data_unsup: torch.Tensor,
                           data_sup: Tuple[torch.Tensor],
                           data_val: Tuple[torch.Tensor],
                           **kwargs: int
                           ) -> Tuple[Type[torch.utils.data.DataLoader]]:
    """
    Helper function that initializes the three dataloaders used by ss-VAE
    models: unsupervised, supervised (randomly sampled), and validation.

    Args:
        data_unsup: Tensor with unlabeled training data.
        data_sup: Tuple of tensors with labeled training data (data, labels).
        data_val: Tuple of tensors with validation data (data, labels).

    Keyword Args:
        batch_size: Batch size for all three loaders (Default: 100).
    """
    loader_unsup = init_dataloader(data_unsup, **kwargs)
    # Fixed: init_dataloader's keyword is `random_sampler`; the previous
    # `sampler=True` was silently swallowed by **kwargs, so the supervised
    # loader never actually used random sampling.
    loader_sup = init_dataloader(*data_sup, random_sampler=True, **kwargs)
    loader_val = init_dataloader(*data_val, **kwargs)
    return loader_unsup, loader_sup, loader_val
|
{"/pyroved/models/__init__.py": ["/pyroved/models/ivae.py", "/pyroved/models/ssivae.py", "/pyroved/models/ss_reg_ivae.py", "/pyroved/models/jivae.py", "/pyroved/models/ved.py"], "/pyroved/models/jivae.py": ["/pyroved/nets/__init__.py", "/pyroved/models/base.py"], "/pyroved/models/ved.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/nets/__init__.py": ["/pyroved/nets/conv.py", "/pyroved/nets/fc.py"], "/pyroved/models/ivae.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/models/ssivae.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/models/ss_reg_ivae.py": ["/pyroved/nets/__init__.py", "/pyroved/models/base.py"]}
|
20,208
|
matthewcarbone/pyroVED
|
refs/heads/main
|
/pyroved/models/ivae.py
|
"""
ivae.py
=======
Variational autoencoder with invariance to rotations, translations, and scale
Created by Maxim Ziatdinov (email: ziatdinovmax@gmail.com)
"""
from typing import Optional, Tuple, Union, List
import pyro
import pyro.distributions as dist
import torch
from pyroved.models.base import baseVAE
from pyroved.nets import fcDecoderNet, fcEncoderNet, sDecoderNet
from pyroved.utils import (
generate_grid, generate_latent_grid, get_sampler,
plot_img_grid, plot_spect_grid, set_deterministic_mode,
to_onehot, transform_coordinates
)
class iVAE(baseVAE):
"""
Variational autoencoder that enforces rotational, translational,
and scale invariances.
Args:
data_dim:
Dimensionality of the input data; use (height x width) for images
or (length,) for spectra.
latent_dim:
Number of latent dimensions.
invariances:
List with invariances to enforce. For 2D systems, `r` enforces
rotational invariance, `t` enforces invariance to
translations, `sc` enforces a scale invariance, and
invariances=None corresponds to vanilla VAE.
For 1D systems, 't' enforces translational invariance and
invariances=None is vanilla VAE
c_dim:
"Feature dimension" of the c vector in p(z|c) where z is
explicitly conditioned on variable c. The latter can be continuous
or discrete. For example, to train a class-conditional VAE on
a dataset with 10 classes, the c_dim must be equal to 10 and
the corresponding n x 10 vector should represent one-hot encoded labels.
(The default c_dim value is 0, i.e. no conditioning is performed).
hidden_dim_e:
Number of hidden units per each layer in encoder (inference
network). (The default is 128).
hidden_dim_d:
Number of hidden units per each layer in decoder (generator
network). (The default is 128).
num_layers_e:
Number of layers in encoder (inference network). (The default is
2).
num_layers_d:
Number of layers in decoder (generator network). (The default is
2).
activation:
Non-linear activation for inner layers of encoder and decoder.
The available activations are ReLU ('relu'), leaky ReLU ('lrelu'),
            hyperbolic tangent ('tanh'), softplus ('softplus'), and GELU ('gelu').
(The default is 'tanh').
sampler_d:
Decoder sampler, as defined as p(x|z) = sampler(decoder(z)).
The available samplers are 'bernoulli', 'continuous_bernoulli',
and 'gaussian'. (The default is "bernoulli").
sigmoid_d:
Sigmoid activation for the decoder output. (The default is True).
seed:
Seed used in torch.manual_seed(seed) and
torch.cuda.manual_seed_all(seed). (The default is 1).
Keyword Args:
device:
Sets device to which model and data will be moved.
Defaults to 'cuda:0' if a GPU is available and to CPU otherwise.
dx_prior:
Translational prior in x direction (float between 0 and 1)
dy_prior:
Translational prior in y direction (float between 0 and 1)
sc_prior:
Scale prior (usually, sc_prior << 1)
decoder_sig:
Sets sigma for a "gaussian" decoder sampler
Examples:
Initialize a VAE model with rotational invariance
>>> data_dim = (28, 28)
>>> rvae = iVAE(data_dim, latent_dim=2, invariances=['r'])
Initialize a class-conditional VAE model with rotational and
        translational invariances for a dataset that has 10 classes
>>> data_dim = (28, 28)
>>> rvae = iVAE(data_dim, latent_dim=2, c_dim=10, invariances=['r', 't'])
"""
    def __init__(
        self,
        data_dim: Tuple[int],
        latent_dim: int = 2,
        invariances: List[str] = None,
        c_dim: int = 0,
        hidden_dim_e: int = 128,
        hidden_dim_d: int = 128,
        num_layers_e: int = 2,
        num_layers_d: int = 2,
        activation: str = "tanh",
        sampler_d: str = "bernoulli",
        sigmoid_d: bool = True,
        seed: int = 1,
        **kwargs: Union[str, float]
    ) -> None:
        """
        Initializes iVAE's modules and parameters.
        """
        args = (data_dim, invariances)
        super(iVAE, self).__init__(*args, **kwargs)
        # Reset the pyro ParamStoreDict object's dictionaries
        pyro.clear_param_store()
        # Set all torch manual seeds
        set_deterministic_mode(seed)
        # Initialize the encoder network; the latent space is extended by
        # self.coord extra dimensions encoding the enforced invariances
        self.encoder_z = fcEncoderNet(
            data_dim, latent_dim + self.coord, 0, hidden_dim_e, num_layers_e,
            activation, softplus_out=True
        )
        # Initialize the decoder network: a "spatial" decoder when any
        # invariance is enforced (0 < coord < 5), a plain fc decoder otherwise
        dnet = sDecoderNet if 0 < self.coord < 5 else fcDecoderNet
        self.decoder = dnet(
            data_dim, latent_dim, c_dim, hidden_dim_d, num_layers_d,
            activation, sigmoid_out=sigmoid_d
        )
        # Initialize the decoder's sampler
        self.sampler_d = get_sampler(sampler_d, **kwargs)
        # Sets continuous latent and conditioning dimensions
        self.z_dim = latent_dim + self.coord
        self.c_dim = c_dim
        # Move model parameters to appropriate device
        self.to(self.device)
    def model(self,
              x: torch.Tensor,
              y: Optional[torch.Tensor] = None,
              **kwargs: float) -> None:
        """
        Defines the generative model p(x|z)p(z).

        Args:
            x: Batch of input data.
            y: Optional conditioning vector (e.g. one-hot class labels)
               concatenated to the latent code before decoding.

        Keyword Args:
            scale_factor: KLD scale factor (beta).
        """
        # register PyTorch module `decoder` with Pyro
        pyro.module("decoder", self.decoder)
        # KLD scale factor (see e.g. https://openreview.net/pdf?id=Sy2fzU9gl)
        beta = kwargs.get("scale_factor", 1.)
        # Number of features per sample (flattened image/spectrum length)
        reshape_ = torch.prod(torch.tensor(x.shape[1:])).item()
        with pyro.plate("data", x.shape[0]):
            # setup hyperparameters for prior p(z)
            z_loc = x.new_zeros(torch.Size((x.shape[0], self.z_dim)))
            z_scale = x.new_ones(torch.Size((x.shape[0], self.z_dim)))
            # sample from prior (value will be sampled by guide when computing the ELBO)
            with pyro.poutine.scale(scale=beta):
                z = pyro.sample("latent", dist.Normal(z_loc, z_scale).to_event(1))
            if self.coord > 0:  # rotationally- and/or translationally-invariant mode
                # Split latent variable into parts for rotation
                # and/or translation and image content
                phi, dx, sc, z = self.split_latent(z)
                if 't' in self.invariances:
                    dx = (dx * self.t_prior).unsqueeze(1)
                # transform coordinate grid
                grid = self.grid.expand(x.shape[0], *self.grid.shape)
                x_coord_prime = transform_coordinates(grid, phi, dx, sc)
            # Add class label (if any)
            if y is not None:
                z = torch.cat([z, y], dim=-1)
            # decode the latent code z together with the transformed coordinates (if any)
            dec_args = (x_coord_prime, z) if self.coord else (z,)
            loc = self.decoder(*dec_args)
            # score against actual images ("binary cross-entropy loss")
            pyro.sample(
                "obs", self.sampler_d(loc.view(-1, reshape_)).to_event(1),
                obs=x.view(-1, reshape_))
    def guide(self,
              x: torch.Tensor,
              y: Optional[torch.Tensor] = None,
              **kwargs: float) -> None:
        """
        Defines the guide (approximate posterior) q(z|x).

        Args:
            x: Batch of input data.
            y: Unused here; kept to mirror the model's signature as
               required by Pyro's SVI.

        Keyword Args:
            scale_factor: KLD scale factor (beta).
        """
        # register PyTorch module `encoder_z` with Pyro
        pyro.module("encoder_z", self.encoder_z)
        # KLD scale factor (see e.g. https://openreview.net/pdf?id=Sy2fzU9gl)
        beta = kwargs.get("scale_factor", 1.)
        with pyro.plate("data", x.shape[0]):
            # use the encoder to get the parameters used to define q(z|x)
            z_loc, z_scale = self.encoder_z(x)
            # sample the latent code z
            with pyro.poutine.scale(scale=beta):
                pyro.sample("latent", dist.Normal(z_loc, z_scale).to_event(1))
def split_latent(self, z: torch.Tensor) -> Tuple[torch.Tensor]:
    """
    Split the latent vector into the rotation/translation/scale
    parts and the image-content part (delegates to the base class).
    """
    parts = self._split_latent(z)
    return parts
def encode(self, x_new: torch.Tensor, **kwargs: int) -> Tuple[torch.Tensor, torch.Tensor]:
    """
    Encodes data using a trained inference (encoder) network

    Args:
        x_new:
            Data to encode with a trained (i)VAE model. The new data must have
            the same dimensions (images height and width or spectra length)
            as the one used for training.
        kwargs:
            Batch size as 'batch_size' (for encoding large volumes of data)

    Returns:
        Tuple (z_loc, z_scale): means and scales of the encoded latents.
    """
    z = self._encode(x_new)
    # The encoder output concatenates means and scales along dim 1;
    # split it into two z_dim-wide halves.
    z_loc, z_scale = z.split(self.z_dim, 1)
    return z_loc, z_scale
def decode(self,
           z: torch.Tensor,
           y: torch.Tensor = None,
           **kwargs: int) -> torch.Tensor:
    """
    Decodes a batch of latent coordinates into the data space.

    Args:
        z: Latent coordinates (without rotational and translational parts)
        y: Conditional "property" vector (e.g. one-hot encoded class vector)
        kwargs: Batch size as 'batch_size'
    """
    latent = z.to(self.device)
    if y is not None:
        # Conditional decoding: append the property vector to the latents
        latent = torch.cat([latent, y.to(self.device)], -1)
    return self._decode(latent, **kwargs)
def manifold2d(self, d: int,
               y: torch.Tensor = None,
               plot: bool = True,
               **kwargs: Union[str, int, float]) -> torch.Tensor:
    """
    Plots a learned latent manifold in the image space

    Args:
        d: Grid size
        plot: Plots the generated manifold (Default: True)
        y: Conditional "property" vector (e.g. one-hot encoded class vector)
        kwargs: Keyword arguments include custom min/max values
            for grid boundaries passed as 'z_coord'
            (e.g. z_coord = [-3, 3, -3, 3]), 'angle' and
            'shift' to condition a generative model on, and plot parameters
            ('padding', 'padding_value', 'cmap', 'origin', 'ylim')

    Returns:
        Decoded manifold (one decoded sample per latent-grid point).
    """
    # Build a d x d grid of latent coordinates covering the latent space
    z, (grid_x, grid_y) = generate_latent_grid(d, **kwargs)
    z = [z]
    if self.c_dim > 0:
        # Conditional model: a property/class vector is mandatory
        if y is None:
            raise ValueError("To generate a manifold pass a conditional vector y")
        # Ensure y is at least 2D before broadcasting over grid points
        y = y.unsqueeze(1) if 0 < y.ndim < 2 else y
        z = z + [y.expand(z[0].shape[0], *y.shape[1:])]
    loc = self.decode(*z, **kwargs)
    if plot:
        if self.ndim == 2:
            plot_img_grid(
                loc, d,
                extent=[grid_x.min(), grid_x.max(), grid_y.min(), grid_y.max()],
                **kwargs)
        elif self.ndim == 1:
            plot_spect_grid(loc, d, **kwargs)
    return loc
|
{"/pyroved/models/__init__.py": ["/pyroved/models/ivae.py", "/pyroved/models/ssivae.py", "/pyroved/models/ss_reg_ivae.py", "/pyroved/models/jivae.py", "/pyroved/models/ved.py"], "/pyroved/models/jivae.py": ["/pyroved/nets/__init__.py", "/pyroved/models/base.py"], "/pyroved/models/ved.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/nets/__init__.py": ["/pyroved/nets/conv.py", "/pyroved/nets/fc.py"], "/pyroved/models/ivae.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/models/ssivae.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/models/ss_reg_ivae.py": ["/pyroved/nets/__init__.py", "/pyroved/models/base.py"]}
|
20,209
|
matthewcarbone/pyroVED
|
refs/heads/main
|
/tests/test_trainers.py
|
import sys
from copy import deepcopy as dc
import torch
import pyro.distributions as dist
import pytest
import numpy as np
from numpy.testing import assert_
sys.path.append("../../")
from pyroved import models, utils, trainers
tt = torch.tensor
def assert_weights_equal(m1, m2):
    """Return True when every pair of corresponding tensors in the two
    state dicts is exactly equal (element-wise, compared on CPU)."""
    return all(
        np.array_equal(p.detach().cpu().numpy(), q.detach().cpu().numpy())
        for p, q in zip(m1.values(), m2.values())
    )
@pytest.mark.parametrize("invariances", [None, ['r'], ['s'], ['t'], ['r', 't', 's']])
def test_svi_trainer_trvae(invariances):
    """Smoke test: two SVI steps on random data give a finite loss and move the weights."""
    data_dim = (5, 8, 8)
    train_data = torch.randn(*data_dim)
    test_data = torch.randn(*data_dim)
    train_loader = utils.init_dataloader(train_data, batch_size=2)
    test_loader = utils.init_dataloader(test_data, batch_size=2)
    vae = models.iVAE(data_dim[1:], 2, invariances)
    trainer = trainers.SVItrainer(vae)
    # deep copy so subsequent optimizer updates don't alias the snapshot
    weights_before = dc(vae.state_dict())
    for _ in range(2):
        trainer.step(train_loader, test_loader)
    weights_after = vae.state_dict()
    assert_(not torch.isnan(tt(trainer.loss_history["training_loss"])).any())
    assert_(not assert_weights_equal(weights_before, weights_after))
@pytest.mark.parametrize("invariances", [None, ['r'], ['s'], ['t'], ['r', 't', 's']])
def test_svi_trainer_jtrvae(invariances):
    """Joint discrete/continuous iVAE: two SVI steps yield a finite loss and update weights."""
    dims = (6, 8, 8)
    loader = utils.init_dataloader(torch.randn(*dims), batch_size=2)
    vae = models.jiVAE(dims[1:], 2, 3, invariances)
    trainer = trainers.SVItrainer(vae, enumerate_parallel=True)
    initial_weights = dc(vae.state_dict())
    for _ in range(2):
        trainer.step(loader)
    assert_(not torch.isnan(tt(trainer.loss_history["training_loss"])).any())
    assert_(not assert_weights_equal(initial_weights, vae.state_dict()))
@pytest.mark.parametrize("invariances", [None, ['r'], ['s'], ['t'], ['r', 't', 's']])
def test_auxsvi_trainer_cls(invariances):
    """Semi-supervised classification trainer: finite loss and updated weights."""
    data_dim = (5, 8, 8)
    # Unlabeled data plus a slightly perturbed labeled copy of it
    train_unsup = torch.randn(data_dim[0], torch.prod(tt(data_dim[1:])).item())
    train_sup = train_unsup + .1 * torch.randn_like(train_unsup)
    labels = dist.OneHotCategorical(torch.ones(data_dim[0], 3)).sample()
    loader_unsup, loader_sup, loader_val = utils.init_ssvae_dataloaders(
        train_unsup, (train_sup, labels), (train_sup, labels), batch_size=2)
    vae = models.ssiVAE(data_dim[1:], 2, 3, invariances)
    trainer = trainers.auxSVItrainer(vae)
    # deep copy so optimizer updates don't alias the snapshot
    weights_before = dc(vae.state_dict())
    for _ in range(2):
        trainer.step(loader_unsup, loader_sup, loader_val)
    weights_after = vae.state_dict()
    assert_(not torch.isnan(tt(trainer.history["training_loss"])).any())
    assert_(not assert_weights_equal(weights_before, weights_after))
@pytest.mark.parametrize("c_dim", [1, 2])
@pytest.mark.parametrize("invariances", [None, ['r'], ['s'], ['t'], ['r', 't', 's']])
def test_auxsvi_trainer_reg(c_dim, invariances):
    """Semi-supervised regression trainer: finite loss and updated weights."""
    data_dim = (5, 8, 8)
    # Unlabeled data plus a slightly perturbed labeled copy with random targets
    train_unsup = torch.randn(data_dim[0], torch.prod(tt(data_dim[1:])).item())
    train_sup = train_unsup + .1 * torch.randn_like(train_unsup)
    gt = torch.randn(data_dim[0], c_dim)
    loader_unsup, loader_sup, loader_val = utils.init_ssvae_dataloaders(
        train_unsup, (train_sup, gt), (train_sup, gt), batch_size=2)
    vae = models.ss_reg_iVAE(data_dim[1:], 2, c_dim, invariances)
    trainer = trainers.auxSVItrainer(vae, task="regression")
    # deep copy so optimizer updates don't alias the snapshot
    weights_before = dc(vae.state_dict())
    for _ in range(2):
        trainer.step(loader_unsup, loader_sup, loader_val)
    weights_after = vae.state_dict()
    assert_(not torch.isnan(tt(trainer.history["training_loss"])).any())
    assert_(not assert_weights_equal(weights_before, weights_after))
@pytest.mark.parametrize("invariances", [None, ['r'], ['s'], ['t'], ['r', 't', 's']])
def test_auxsvi_trainer_swa(invariances):
    """Stochastic weight averaging: averaged classifier weights differ from the final ones."""
    data_dim = (5, 8, 8)
    train_unsup = torch.randn(data_dim[0], torch.prod(tt(data_dim[1:])).item())
    train_sup = train_unsup + .1 * torch.randn_like(train_unsup)
    labels = dist.OneHotCategorical(torch.ones(data_dim[0], 3)).sample()
    loader_unsup, loader_sup, _ = utils.init_ssvae_dataloaders(
        train_unsup, (train_sup, labels), (train_sup, labels), batch_size=2)
    vae = models.ssiVAE(data_dim[1:], 2, 3, invariances)
    trainer = trainers.auxSVItrainer(vae)
    # Save a weight snapshot of the classifier after every step
    for _ in range(3):
        trainer.step(loader_unsup, loader_sup)
        trainer.save_running_weights("encoder_y")
    weights_final = dc(vae.encoder_y.state_dict())
    trainer.average_weights("encoder_y")
    weights_aver = vae.encoder_y.state_dict()
    assert_(not assert_weights_equal(weights_final, weights_aver))
@pytest.mark.parametrize("input_dim, output_dim",
                         [((8,), (8, 8)), ((8, 8), (8,)),
                          ((8,), (8,)), ((8, 8), (8, 8))])
def test_svi_trainer_ved(input_dim, output_dim):
    """VED training smoke test over all 1D/2D input-output combinations."""
    x = torch.randn(5, 1, *input_dim)
    y = torch.randn(5, 1, *output_dim)
    loader = utils.init_dataloader(x, y, batch_size=2)
    ved = models.VED(input_dim, output_dim)
    trainer = trainers.SVItrainer(ved)
    snapshot = dc(ved.state_dict())
    for _ in range(2):
        trainer.step(loader)
    assert_(not torch.isnan(tt(trainer.loss_history["training_loss"])).any())
    assert_(not assert_weights_equal(snapshot, ved.state_dict()))
|
{"/pyroved/models/__init__.py": ["/pyroved/models/ivae.py", "/pyroved/models/ssivae.py", "/pyroved/models/ss_reg_ivae.py", "/pyroved/models/jivae.py", "/pyroved/models/ved.py"], "/pyroved/models/jivae.py": ["/pyroved/nets/__init__.py", "/pyroved/models/base.py"], "/pyroved/models/ved.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/nets/__init__.py": ["/pyroved/nets/conv.py", "/pyroved/nets/fc.py"], "/pyroved/models/ivae.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/models/ssivae.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/models/ss_reg_ivae.py": ["/pyroved/nets/__init__.py", "/pyroved/models/base.py"]}
|
20,210
|
matthewcarbone/pyroVED
|
refs/heads/main
|
/pyroved/utils/coord.py
|
from typing import Union, Tuple
import torch
import pyro.distributions as dist
tt = torch.tensor
def grid2xy(X1: torch.Tensor, X2: torch.Tensor) -> torch.Tensor:
    """Flatten two meshgrid components into an (N, 2) array of xy pairs."""
    stacked = torch.stack((X1, X2), dim=0)
    flat = stacked.reshape(stacked.shape[0], -1)
    return flat.T
def imcoordgrid(im_dim: Tuple[int]) -> torch.Tensor:
    """Return a flattened (H*W, 2) image coordinate grid spanning [-1, 1]
    (x ascending, y descending)."""
    rows = torch.linspace(-1, 1, im_dim[0])
    cols = torch.linspace(1, -1, im_dim[1])
    g0, g1 = torch.meshgrid(rows, cols)
    return grid2xy(g0, g1)
def generate_grid(data_dim: Tuple[int]) -> torch.Tensor:
    """Generates 1D or 2D grid of coordinates. Returns a torch tensor with two
    axes. If the input data_dim indicates only one dimensional data, then the
    output will be a 2d torch tensor artificially augmented along the last
    dimension, of shape [N, 1].

    Args:
        data_dim:
            Dimensions of the input data.

    Raises:
        NotImplementedError:
            If the dimension (length) of the provided data_dim is not equal to
            1 or 2.

    Returns:
        The grid (always 2d).
    """
    ndim = len(data_dim)
    if ndim not in (1, 2):
        raise NotImplementedError("Currently supports only 1D and 2D data")
    if ndim == 1:
        return torch.linspace(-1, 1, data_dim[0]).unsqueeze(-1)
    return imcoordgrid(data_dim)
def transform_coordinates(coord: torch.Tensor,
                          phi: Union[torch.Tensor, float] = 0,
                          coord_dx: Union[torch.Tensor, float] = 0,
                          scale: Union[torch.Tensor, float] = 1.,
                          ) -> torch.Tensor:
    """
    Rotation of 2D coordinates followed by scaling and translation.
    For a 1D grid only translation is applied. Operates on batches.
    """
    # 1D grids (last dim == 1) support translation only
    if coord.shape[-1] == 1:
        return coord + coord_dx
    rotated = rotate_coordinates(coord, phi)
    scaled = scale_coordinates(rotated, scale)
    return scaled + coord_dx
def rotate_coordinates(coord: torch.Tensor,
                       phi: Union[torch.Tensor, float] = 0
                       ) -> torch.Tensor:
    """
    Rotation of 2D coordinates. Operates on batches.

    Args:
        coord: Batch of coordinates, shape (batch, n_points, 2).
        phi: Per-sample rotation angles of shape (batch,). A plain number is
            also accepted and broadcast over the batch.
            Fix: the previous code called ``torch.sum(phi)`` directly, which
            raises a TypeError for the documented default ``phi=0`` because
            ``torch.sum`` requires a tensor.

    Returns:
        Rotated coordinates, same shape as ``coord``.
    """
    if not torch.is_tensor(phi):
        # Broadcast a scalar angle over the batch (makes phi=0 usable).
        phi = coord.new_full((coord.shape[0],), float(phi))
    if torch.sum(phi) == 0:
        phi = coord.new_zeros(coord.shape[0])
    # Assemble the batch of 2x2 rotation matrices row by row
    rotmat_r1 = torch.stack([torch.cos(phi), torch.sin(phi)], 1)
    rotmat_r2 = torch.stack([-torch.sin(phi), torch.cos(phi)], 1)
    rotmat = torch.stack([rotmat_r1, rotmat_r2], axis=1)
    return torch.bmm(coord, rotmat)
def scale_coordinates(coord: torch.Tensor,
                      scale: torch.Tensor
                      ) -> torch.Tensor:
    """
    Scaling of 2D coordinates. Operates on batches.
    """
    n = coord.shape[0]
    # Batch of 2x2 diagonal (isotropic) scaling matrices
    smat = coord.new_zeros(n, 2, 2)
    smat[:, 0, 0] = scale
    smat[:, 1, 1] = scale
    return torch.bmm(coord, smat)
def generate_latent_grid(d: int, **kwargs) -> torch.Tensor:
    """
    Generates a grid of latent space coordinates.

    With 'z_coord' in kwargs the grid spans the user-given bounds; otherwise
    it covers the central 90% probability mass of a standard normal.
    """
    if isinstance(d, int):
        d = [d, d]
    z_coord = kwargs.get("z_coord")
    if z_coord:
        z1, z2, z3, z4 = z_coord
        grid_x = torch.linspace(z2, z1, d[0])
        grid_y = torch.linspace(z3, z4, d[1])
    else:
        grid_x = dist.Normal(0, 1).icdf(torch.linspace(0.95, 0.05, d[0]))
        grid_y = dist.Normal(0, 1).icdf(torch.linspace(0.05, 0.95, d[1]))
    # Row-major enumeration of all (x, y) grid points
    z = [tt([xi, yi]).float().unsqueeze(0) for xi in grid_x for yi in grid_y]
    return torch.cat(z), (grid_x, grid_y)
def generate_latent_grid_traversal(d: int, cont_dim: int, disc_dim,
                                   cont_idx: int, cont_idx_fixed: int,
                                   num_samples: int) -> Tuple[torch.Tensor]:
    """
    Generates continuous and discrete grids for latent space traversal

    Args:
        d: Grid size (d x d traversal).
        cont_dim: Number of continuous latent dimensions.
        disc_dim: Number of discrete categories (one-hot width).
        cont_idx: Index of the continuous latent to traverse.
        cont_idx_fixed: Value at which the remaining continuous latents are held.
        num_samples: Total number of samples (expected to be d**2 by callers).

    Returns:
        Tuple (samples_cont, samples_disc) of latent coordinates.
    """
    # Get continuous latent coordinates: all latents fixed except cont_idx,
    # which sweeps the central 90% mass of a standard normal across columns.
    samples_cont = torch.zeros(size=(num_samples, cont_dim)) + cont_idx_fixed
    cont_traversal = dist.Normal(0, 1).icdf(torch.linspace(0.95, 0.05, d))
    for i in range(d):
        for j in range(d):
            samples_cont[i * d + j, cont_idx] = cont_traversal[j]
    # Get discrete latent coordinates: category index cycles over rows so that
    # row i of the grid uses one-hot class n[i].
    n = torch.arange(0, disc_dim)
    n = n.tile(d // disc_dim + 1)[:d]
    samples_disc = []
    for i in range(d):
        samples_disc_i = torch.zeros((d, disc_dim))
        samples_disc_i[:, n[i]] = 1
        samples_disc.append(samples_disc_i)
    samples_disc = torch.cat(samples_disc)
    return samples_cont, samples_disc
|
{"/pyroved/models/__init__.py": ["/pyroved/models/ivae.py", "/pyroved/models/ssivae.py", "/pyroved/models/ss_reg_ivae.py", "/pyroved/models/jivae.py", "/pyroved/models/ved.py"], "/pyroved/models/jivae.py": ["/pyroved/nets/__init__.py", "/pyroved/models/base.py"], "/pyroved/models/ved.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/nets/__init__.py": ["/pyroved/nets/conv.py", "/pyroved/nets/fc.py"], "/pyroved/models/ivae.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/models/ssivae.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/models/ss_reg_ivae.py": ["/pyroved/nets/__init__.py", "/pyroved/models/base.py"]}
|
20,211
|
matthewcarbone/pyroVED
|
refs/heads/main
|
/pyroved/models/ssivae.py
|
"""
ssivae.py
=========
Semi-supervised variational autoencoder for data
with orientational, positional and scale disorders
Created by Maxim Ziatdinov (email: ziatdinovmax@gmail.com)
"""
from typing import Optional, Tuple, Union, Type, List
import pyro
import pyro.distributions as dist
import torch
from .base import baseVAE
from ..nets import fcDecoderNet, fcEncoderNet, sDecoderNet, fcClassifierNet
from ..utils import (generate_grid, get_sampler, plot_img_grid,
plot_spect_grid, set_deterministic_mode, to_onehot,
transform_coordinates, init_dataloader, generate_latent_grid,
generate_latent_grid_traversal, plot_grid_traversal)
tt = torch.tensor
class ssiVAE(baseVAE):
    """
    Semi-supervised variational autoencoder with the enforcement
    of rotational, translational, and scale invariances. It allows performing
    a classification of image/spectral data given a small number of examples
    even in the presence of a distribution shift between the labeled and unlabeled parts.

    Args:
        data_dim:
            Dimensionality of the input data; use (h x w) for images
            or (length,) for spectra.
        latent_dim:
            Number of latent dimensions.
        num_classes:
            Number of classes in the classification scheme
        invariances:
            List with invariances to enforce. For 2D systems, `r` enforces
            rotational invariance, `t` enforces invariance to
            translations, `s` enforces a scale invariance, and
            invariances=None corresponds to vanilla VAE.
            For 1D systems, 't' enforces translational invariance and
            invariances=None is vanilla VAE
        hidden_dim_e:
            Number of hidden units per each layer in encoder (inference network).
        hidden_dim_d:
            Number of hidden units per each layer in decoder (generator network).
        hidden_dim_cls:
            Number of hidden units ("neurons") in each layer of classifier
        num_layers_e:
            Number of layers in encoder (inference network).
        num_layers_d:
            Number of layers in decoder (generator network).
        num_layers_cls:
            Number of layers in classifier
        activation:
            Non-linear activation for inner layers of both encoder and the decoder.
            The available activations are ReLU ('relu'), leaky ReLU ('lrelu'),
            hyberbolic tangent ('tanh'), softplus ('softplus'), and GELU ('gelu').
            (The default is "tanh").
        sampler_d:
            Decoder sampler, as defined as p(x|z) = sampler(decoder(z)).
            The available samplers are 'bernoulli', 'continuous_bernoulli',
            and 'gaussian' (Default: 'bernoulli').
        sigmoid_d:
            Sigmoid activation for the decoder output (Default: True)
        seed:
            Seed used in torch.manual_seed(seed) and
            torch.cuda.manual_seed_all(seed)

    Keyword Args:
        device:
            Sets device to which model and data will be moved.
            Defaults to 'cuda:0' if a GPU is available and to CPU otherwise.
        dx_prior:
            Translational prior in x direction (float between 0 and 1)
        dy_prior:
            Translational prior in y direction (float between 0 and 1)
        sc_prior:
            Scale prior (usually, sc_prior << 1)
        decoder_sig:
            Sets sigma for a "gaussian" decoder sampler

    Examples:
        Initialize a VAE model with rotational invariance for
        semi-supervised learning of the dataset that has 10 classes

        >>> data_dim = (28, 28)
        >>> ssvae = ssiVAE(data_dim, latent_dim=2, num_classes=10, invariances=['r'])
    """

    def __init__(self,
                 data_dim: Tuple[int],
                 latent_dim: int,
                 num_classes: int,
                 invariances: List[str] = None,
                 hidden_dim_e: int = 128,
                 hidden_dim_d: int = 128,
                 hidden_dim_cls: int = 128,
                 num_layers_e: int = 2,
                 num_layers_d: int = 2,
                 num_layers_cls: int = 2,
                 activation: str = "tanh",
                 sampler_d: str = "bernoulli",
                 sigmoid_d: bool = True,
                 seed: int = 1,
                 **kwargs: Union[str, float]
                 ) -> None:
        """
        Initializes ss-iVAE parameters
        """
        args = (data_dim, invariances)
        super(ssiVAE, self).__init__(*args, **kwargs)
        pyro.clear_param_store()
        set_deterministic_mode(seed)
        self.data_dim = data_dim
        # Initialize z-Encoder neural network
        self.encoder_z = fcEncoderNet(
            data_dim, latent_dim+self.coord, num_classes,
            hidden_dim_e, num_layers_e, activation, flat=False)
        # Initialize y-Encoder neural network
        self.encoder_y = fcClassifierNet(
            data_dim, num_classes, hidden_dim_cls, num_layers_cls,
            activation)
        # Initializes Decoder neural network: a coordinate-conditioned decoder
        # is used only when at least one invariance is enforced (coord > 0)
        dnet = sDecoderNet if 0 < self.coord < 5 else fcDecoderNet
        self.decoder = dnet(
            data_dim, latent_dim, num_classes, hidden_dim_d,
            num_layers_d, activation, sigmoid_out=sigmoid_d,
            unflat=False)
        self.sampler_d = get_sampler(sampler_d, **kwargs)
        # Sets continuous and discrete dimensions; the first self.coord latent
        # dims hold rotation/translation/scale parameters
        self.z_dim = latent_dim + self.coord
        self.num_classes = num_classes
        # Send model parameters to their appropriate devices.
        self.to(self.device)

    def model(self,
              xs: torch.Tensor,
              ys: Optional[torch.Tensor] = None,
              **kwargs: float) -> None:
        """
        Model of the generative process p(x|z,y)p(y)p(z)

        Args:
            xs: Batch of (flattened) observations.
            ys: Optional one-hot labels; observed when given, otherwise
                sampled from a uniform categorical prior.
            kwargs: KLD scale factor as 'scale_factor'.
        """
        pyro.module("ss_vae", self)
        batch_dim = xs.size(0)
        specs = dict(dtype=xs.dtype, device=xs.device)
        beta = kwargs.get("scale_factor", 1.)
        # pyro.plate enforces independence between variables in batches xs, ys
        with pyro.plate("data"):
            # sample the latent vector from the constant prior distribution
            prior_loc = torch.zeros(batch_dim, self.z_dim, **specs)
            prior_scale = torch.ones(batch_dim, self.z_dim, **specs)
            with pyro.poutine.scale(scale=beta):
                zs = pyro.sample(
                    "z", dist.Normal(prior_loc, prior_scale).to_event(1))
            # split latent variable into parts for rotation and/or translation
            # and image content
            if self.coord > 0:
                phi, dx, sc, zs = self.split_latent(zs)
                if 't' in self.invariances:
                    dx = (dx * self.t_prior).unsqueeze(1)
                # transform coordinate grid; batch size taken from whichever
                # invariance part is present
                if 'r' in self.invariances:
                    expdim = phi.shape[0]
                elif 't' in self.invariances:
                    expdim = dx.shape[0]
                elif 's' in self.invariances:
                    expdim = sc.shape[0]
                grid = self.grid.expand(expdim, *self.grid.shape)
                x_coord_prime = transform_coordinates(grid, phi, dx, sc)
            # sample label from the constant prior or observe the value
            alpha_prior = (torch.ones(batch_dim, self.num_classes, **specs) /
                           self.num_classes)
            ys = pyro.sample("y", dist.OneHotCategorical(alpha_prior), obs=ys)
            # Score against the parametrized distribution
            # p(x|y,z) = bernoulli(decoder(y,z))
            d_args = (x_coord_prime, [zs, ys]) if self.coord else ([zs, ys],)
            loc = self.decoder(*d_args)
            loc = loc.view(*ys.shape[:-1], -1)
            pyro.sample("x", self.sampler_d(loc).to_event(1), obs=xs)

    def guide(self, xs: torch.Tensor,
              ys: Optional[torch.Tensor] = None,
              **kwargs: float) -> None:
        """
        Guide q(z|y,x)q(y|x)
        """
        beta = kwargs.get("scale_factor", 1.)
        with pyro.plate("data"):
            # sample and score the digit with the variational distribution
            # q(y|x) = categorical(alpha(x))
            if ys is None:
                alpha = self.encoder_y(xs)
                ys = pyro.sample("y", dist.OneHotCategorical(alpha))
            # sample (and score) the latent vector with the variational
            # distribution q(z|x,y) = normal(loc(x,y),scale(x,y))
            loc, scale = self.encoder_z([xs, ys])
            with pyro.poutine.scale(scale=beta):
                pyro.sample("z", dist.Normal(loc, scale).to_event(1))

    def split_latent(self, zs: torch.Tensor) -> Tuple[torch.Tensor]:
        """
        Split latent variable into parts with rotation and/or translation
        and image content
        """
        zdims = list(zs.shape)
        zdims[-1] = zdims[-1] - self.coord
        zs = zs.view(-1, zs.size(-1))
        # For 1D, there is only translation
        phi, dx, sc, zs = self._split_latent(zs)
        return phi, dx, sc, zs.view(*zdims)

    def model_aux(self, xs: torch.Tensor,
                  ys: Optional[torch.Tensor] = None,
                  **kwargs: float) -> None:
        """
        Models an auxiliary (supervised) loss
        """
        pyro.module("ss_vae", self)
        with pyro.plate("data"):
            # the extra term to yield an auxiliary loss
            aux_loss_multiplier = kwargs.get("aux_loss_multiplier", 20)
            if ys is not None:
                alpha = self.encoder_y.forward(xs)
                with pyro.poutine.scale(scale=aux_loss_multiplier):
                    pyro.sample("y_aux", dist.OneHotCategorical(alpha), obs=ys)

    def guide_aux(self, xs, ys=None, **kwargs):
        """
        Dummy guide function to accompany model_aux
        """
        pass

    def set_classifier(self, cls_net: Type[torch.nn.Module]) -> None:
        """
        Sets a user-defined classification network
        """
        self.encoder_y = cls_net

    def classifier(self,
                   x_new: torch.Tensor,
                   **kwargs: int) -> torch.Tensor:
        """
        Classifies data

        Args:
            x_new:
                Data to classify with a trained ss-iVAE. The new data must have
                the same dimensions (images height x width or spectra length)
                as the one used for training.
            kwargs:
                Batch size as 'batch_size' (for encoding large volumes of data)
        """
        def classify(x_i) -> torch.Tensor:
            # Argmax over class probabilities from the y-encoder
            with torch.no_grad():
                alpha = self.encoder_y(x_i)
            _, predicted = torch.max(alpha.data, 1)
            return predicted.cpu()

        x_new = init_dataloader(x_new, shuffle=False, **kwargs)
        y_predicted = []
        for (x_i,) in x_new:
            y_predicted.append(classify(x_i.to(self.device)))
        return torch.cat(y_predicted)

    def encode(self,
               x_new: torch.Tensor,
               y: Optional[torch.Tensor] = None,
               **kwargs: int) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]:
        """
        Encodes data using a trained inference (encoder) network

        Args:
            x_new:
                Data to encode with a trained iVAE. The new data must have
                the same dimensions (images height and width or spectra length)
                as the one used for training.
            y:
                Classes as one-hot vectors for each sample in x_new. If not provided,
                the ss-iVAE's classifier will be used to predict the classes.
            kwargs:
                Batch size as 'batch_size' (for encoding large volumes of data)

        Returns:
            Tuple (z_loc, z_scale, y_pred): latent means, latent scales, and
            the (predicted or supplied) class indices.
        """
        if y is None:
            y = self.classifier(x_new, **kwargs)
        if y.ndim < 2:
            y = to_onehot(y, self.num_classes)
        z = self._encode(x_new, y, **kwargs)
        # Encoder output concatenates means and scales along dim 1
        z_loc, z_scale = z.split(self.z_dim, 1)
        _, y_pred = torch.max(y, 1)
        return z_loc, z_scale, y_pred

    def decode(self, z: torch.Tensor, y: torch.Tensor, **kwargs: int) -> torch.Tensor:
        """
        Decodes a batch of latent coordinates

        Args:
            z: Latent coordinates (without rotational and translational parts)
            y: Classes as one-hot vectors for each sample in z
            kwargs: Batch size as 'batch_size'
        """
        z = torch.cat([z.to(self.device), y.to(self.device)], -1)
        loc = self._decode(z, **kwargs)
        return loc.view(-1, *self.data_dim)

    def manifold2d(self, d: int, plot: bool = True,
                   **kwargs: Union[str, int, float]) -> torch.Tensor:
        """
        Returns a learned latent manifold in the image space

        Args:
            d: Grid size
            plot: Plots the generated manifold (Default: True)
            kwargs: Keyword arguments include 'label' for class label (if any),
                custom min/max values for grid boundaries passed as 'z_coord'
                (e.g. z_coord = [-3, 3, -3, 3]), 'angle' and 'shift' to
                condition a generative model on, and plot parameters
                ('padding', 'padding_value', 'cmap', 'origin', 'ylim')
        """
        z, (grid_x, grid_y) = generate_latent_grid(d, **kwargs)
        # Condition the whole grid on a single class label (default: class 0)
        cls = tt(kwargs.get("label", 0))
        if cls.ndim < 2:
            cls = to_onehot(cls.unsqueeze(0), self.num_classes)
        cls = cls.repeat(z.shape[0], 1)
        loc = self.decode(z, cls, **kwargs)
        if plot:
            if self.ndim == 2:
                plot_img_grid(
                    loc, d,
                    extent=[grid_x.min(), grid_x.max(), grid_y.min(), grid_y.max()],
                    **kwargs)
            elif self.ndim == 1:
                plot_spect_grid(loc, d, **kwargs)
        return loc

    def manifold_traversal(self, d: int, cont_idx: int, cont_idx_fixed: int = 0,
                           plot: bool = True, **kwargs: Union[str, int, float]
                           ) -> torch.Tensor:
        """
        Latent space traversal for continuous and discrete latent variables

        Args:
            d: Grid size
            cont_idx:
                Continuous latent variable used for plotting
                a latent manifold traversal
            cont_idx_fixed:
                Value which the remaining continuous latent variables are fixed at
            plot:
                Plots the generated manifold (Default: True)
            kwargs:
                Keyword arguments include custom min/max values for grid
                boundaries passed as 'z_coord' (e.g. z_coord = [-3, 3, -3, 3]),
                'angle' and 'shift' to condition a generative model on,
                and plot parameters ('padding', 'padding_value', 'cmap', 'origin', 'ylim')
        """
        num_samples = d**2
        disc_dim = self.num_classes
        # Content dims only (exclude rotation/translation/scale latents)
        cont_dim = self.z_dim - self.coord
        data_dim = self.data_dim
        # Get continuous and discrete latent coordinates
        samples_cont, samples_disc = generate_latent_grid_traversal(
            d, cont_dim, disc_dim, cont_idx, cont_idx_fixed, num_samples)
        # Pass discrete and continuous latent coordinates through a decoder
        decoded = self.decode(samples_cont, samples_disc, **kwargs)
        if plot:
            plot_grid_traversal(decoded, d, data_dim, disc_dim, **kwargs)
        return decoded
|
{"/pyroved/models/__init__.py": ["/pyroved/models/ivae.py", "/pyroved/models/ssivae.py", "/pyroved/models/ss_reg_ivae.py", "/pyroved/models/jivae.py", "/pyroved/models/ved.py"], "/pyroved/models/jivae.py": ["/pyroved/nets/__init__.py", "/pyroved/models/base.py"], "/pyroved/models/ved.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/nets/__init__.py": ["/pyroved/nets/conv.py", "/pyroved/nets/fc.py"], "/pyroved/models/ivae.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/models/ssivae.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/models/ss_reg_ivae.py": ["/pyroved/nets/__init__.py", "/pyroved/models/base.py"]}
|
20,212
|
matthewcarbone/pyroVED
|
refs/heads/main
|
/tests/test_models.py
|
import sys
from copy import deepcopy as dc
import torch
import pyro
import pyro.poutine as poutine
import pyro.distributions as dist
import pyro.infer as infer
from pyro.poutine.enum_messenger import EnumMessenger
import pytest
from numpy.testing import assert_equal, assert_
from numpy import array_equal
sys.path.append("../../")
from pyroved import models, nets, utils
tt = torch.tensor
def get_traces(model, *args):
    """Capture paired guide/model execution traces for trace-site inspection."""
    g_trace = pyro.poutine.trace(model.guide).get_trace(*args)
    replayed_model = pyro.poutine.replay(model.model, trace=g_trace)
    m_trace = pyro.poutine.trace(replayed_model).get_trace(*args)
    return g_trace, m_trace
def get_enum_traces(model, x):
    """Capture guide/model traces with parallel enumeration of discrete latents."""
    # Guide enumerates in parallel starting one dim left of the batch dim
    guide_enum = EnumMessenger(first_available_dim=-2)
    model_enum = EnumMessenger()
    guide_ = guide_enum(
        infer.config_enumerate(model.guide, "parallel", expand=True))
    model_ = model_enum(model.model)
    guide_trace = poutine.trace(guide_, graph_type="flat").get_trace(x)
    # Replay the model against the guide's sampled values
    model_trace = poutine.trace(
        pyro.poutine.replay(model_, trace=guide_trace),
        graph_type="flat").get_trace(x)
    return guide_trace, model_trace
def assert_weights_equal(m1, m2):
    """Return True when all corresponding tensors of the two state dicts match exactly."""
    return all(
        array_equal(p.detach().cpu().numpy(), q.detach().cpu().numpy())
        for p, q in zip(m1.values(), m2.values())
    )
@pytest.mark.parametrize(
    "invariances, coord_exp", [(None, 0), (['t'], 1)])
def test_base_vae_1d(invariances, coord_exp):
    """1D baseVAE exposes one coordinate latent when translation is enforced."""
    vae = models.base.baseVAE((8,), invariances)
    assert_equal(vae.coord, coord_exp)
@pytest.mark.parametrize(
    "invariances, coord_exp",
    [(None, 0), (['r'], 1), (['t'], 2), (['s'], 1), (['r', 's', 't'], 4)])
def test_base_vae_2d(invariances, coord_exp):
    """2D baseVAE: 'r' and 's' add one coordinate latent each; 't' adds two (x and y)."""
    data_dim = (8, 8)
    m = models.base.baseVAE(data_dim, invariances)
    assert_equal(m.coord, coord_exp)
@pytest.mark.parametrize("invariances", [['r'], ['s'], ['r', 't']])
def test_base_vae_1d_exception(invariances):
    """Non-translational invariances must be rejected for 1D data.

    Fix: pytest's ``ExceptionInfo`` stores the raised exception in ``.value``;
    ``.exception`` is a unittest-ism and raises AttributeError here, so the
    original message check could never execute correctly.
    """
    data_dim = (8,)
    with pytest.raises(ValueError) as context:
        _ = models.base.baseVAE(data_dim, invariances)
    assert_("For 1D data, the only invariance to enforce is translation"
            in str(context.value))
def test_base_vae_split_latent_1d():
    """For 1D data with 't', _split_latent yields only a shift (n, 1) plus content."""
    z = torch.randn(5, 3)
    m = models.base.baseVAE((8,), ['t'])
    phi, dx, sc, z = m._split_latent(z)
    # No rotation or scale parts exist for 1D data
    assert_(phi is None)
    assert_(sc is None)
    assert_(isinstance(dx, torch.Tensor))
    assert_equal(dx.shape, (5, 1))
    # shift part is actually populated (not all zeros)
    assert_(abs(dx).sum() > 0)
    assert_(isinstance(z, torch.Tensor))
    assert_equal(z.shape, (5, 2))
def test_base_vae_split_latent_2d():
    """2D latent split with r+t+s: angle (5,1), shift (5,2), scale (5,1), content (5,2).

    Fix: the original called ``assert_(shape, expected)``, which treats the
    expected tuple as a failure *message* and always passes (shapes are truthy);
    use ``assert_equal`` so the shapes are actually compared. The last expected
    shape is (5, 2): z has 6 dims, 4 of which are coordinate latents.
    """
    z = torch.randn(5, 6)
    m = models.base.baseVAE((8, 8), ['r', 't', 's'])
    z_split = m._split_latent(z)
    assert_(all([isinstance(z_, torch.Tensor) for z_ in z_split]))
    assert_equal(z_split[0].shape, (5, 1))
    assert_equal(z_split[1].shape, (5, 2))
    assert_equal(z_split[2].shape, (5, 1))
    assert_equal(z_split[3].shape, (5, 2))
@pytest.mark.parametrize("invariances", [None, ['r'], ['s'], ['r', 't', 's'], ['s', 'r', 't']])
def test_trvae_sites_dims_2d(invariances):
    """Trace-site shapes for 2D iVAE: latent = 2 content dims + coordinate dims."""
    data_dim = (3, 8, 8)
    x = torch.randn(*data_dim)
    # Expected coordinate latents: one per invariance, +1 extra for 2D translation
    coord = 0
    if invariances is not None:
        coord = len(invariances)
        if 't' in invariances and len(data_dim[1:]) == 2:
            coord = coord + 1
    model = models.iVAE(data_dim[1:], invariances=invariances)
    guide_trace, model_trace = get_traces(model, x)
    assert_equal(model_trace.nodes["latent"]['value'].shape,
                 (data_dim[0], coord+2))
    assert_equal(guide_trace.nodes["latent"]['value'].shape,
                 (data_dim[0], coord+2))
    # Observations are flattened images
    assert_equal(model_trace.nodes["obs"]['value'].shape,
                 (data_dim[0], torch.prod(tt(data_dim[1:])).item()))
@pytest.mark.parametrize("invariances", [None, ['t']])
def test_trvae_sites_dims_1d(invariances):
    """Trace-site shapes for 1D iVAE with and without translational invariance."""
    n, length = 3, 8
    x = torch.randn(n, length)
    n_coord = len(invariances) if invariances else 0
    model = models.iVAE((length,), invariances=invariances)
    guide_trace, model_trace = get_traces(model, x)
    expected_latent = (n, n_coord + 2)
    assert_equal(model_trace.nodes["latent"]['value'].shape, expected_latent)
    assert_equal(guide_trace.nodes["latent"]['value'].shape, expected_latent)
    assert_equal(model_trace.nodes["obs"]['value'].shape, (n, length))
@pytest.mark.parametrize("invariances", [None, ['t']])
@pytest.mark.parametrize("data_dim", [(3, 8, 8), (3, 8)])
def test_trvae_sites_fn(data_dim, invariances):
    """iVAE guide/model use Normal latents and a Bernoulli likelihood."""
    x = torch.randn(*data_dim)
    model = models.iVAE(data_dim[1:], invariances=invariances)
    guide_trace, model_trace = get_traces(model, x)
    assert_(isinstance(model_trace.nodes["latent"]['fn'].base_dist, dist.Normal))
    assert_(isinstance(guide_trace.nodes["latent"]['fn'].base_dist, dist.Normal))
    assert_(isinstance(model_trace.nodes["obs"]['fn'].base_dist, dist.Bernoulli))
@pytest.mark.parametrize("input_dim, output_dim",
                         [((8,), (8, 8)), ((8, 8), (8,)),
                          ((8,), (8,)), ((8, 8), (8, 8))])
def test_ved_sites_dims(input_dim, output_dim):
    """VED trace sites: 2D latent per sample, flattened output observations."""
    x = torch.randn(2, 1, *input_dim)
    y = torch.randn(2, 1, *output_dim)
    model = models.VED(input_dim, output_dim)
    guide_trace, model_trace = get_traces(model, x, y)
    assert_equal(model_trace.nodes["z"]['value'].shape,
                 (x.shape[0], 2))
    assert_equal(guide_trace.nodes["z"]['value'].shape,
                 (x.shape[0], 2))
    # Observations are flattened to (batch, prod(output_dim))
    assert_equal(model_trace.nodes["obs"]['value'].shape,
                 (y.shape[0], torch.prod(tt(output_dim)).item()))
@pytest.mark.parametrize("input_dim, output_dim",
                         [((8,), (8, 8)), ((8, 8), (8,)),
                          ((8,), (8,)), ((8, 8), (8, 8))])
def test_ved_sites_fn(input_dim, output_dim):
    """VED guide/model use Normal latents and a Bernoulli likelihood."""
    x = torch.randn(2, 1, *input_dim)
    y = torch.randn(2, 1, *output_dim)
    ved = models.VED(input_dim, output_dim)
    guide_trace, model_trace = get_traces(ved, x, y)
    for trace in (model_trace, guide_trace):
        assert_(isinstance(trace.nodes["z"]['fn'].base_dist, dist.Normal))
    assert_(isinstance(model_trace.nodes["obs"]['fn'].base_dist, dist.Bernoulli))
@pytest.mark.parametrize("invariances", [None, ['r'], ['s'], ['r', 't', 's'], ['s', 'r', 't']])
def test_jtrvae_cont_sites_dims_2d(invariances):
    """jiVAE continuous latent/obs site shapes for 2D data."""
    data_dim = (3, 8, 8)
    x = torch.randn(*data_dim)
    # Expected coordinate latents: one per invariance, +1 extra for 2D translation
    coord = 0
    if invariances is not None:
        coord = len(invariances)
        if 't' in invariances and len(data_dim[1:]) == 2:
            coord = coord + 1
    model = models.jiVAE(data_dim[1:], 2, 3, invariances=invariances)
    guide_trace, model_trace = get_enum_traces(model, x)
    assert_equal(model_trace.nodes["latent_cont"]['value'].shape,
                 (data_dim[0], coord+2))
    assert_equal(guide_trace.nodes["latent_cont"]['value'].shape,
                 (data_dim[0], coord+2))
    assert_equal(model_trace.nodes["obs"]['value'].shape,
                 (data_dim[0], torch.prod(tt(data_dim[1:])).item()))
@pytest.mark.parametrize("invariances", [None, ['r'], ['s'], ['r', 't', 's'], ['s', 'r', 't']])
def test_jtrvae_disc_sites_dims(invariances):
    """Discrete latent site of jiVAE has an (enum, batch, num_classes) shape.

    The original computed a `coord` value that was never used in any
    assertion; that dead code is removed here.
    """
    data_dim = (3, 8, 8)
    x = torch.randn(*data_dim)
    model = models.jiVAE(data_dim[1:], 2, 3, invariances=invariances)
    guide_trace, model_trace = get_enum_traces(model, x)
    assert_equal(model_trace.nodes["latent_disc"]['value'].shape,
                 (3, data_dim[0], 3))
    assert_equal(guide_trace.nodes["latent_disc"]['value'].shape,
                 (3, data_dim[0], 3))
@pytest.mark.parametrize("invariances", [None, ['r'], ['s'], ['r', 't', 's'], ['s', 'r', 't']])
def test_jtrvae_cont_sites_fn(invariances):
    """Continuous latent and obs sites of jiVAE use Normal/Bernoulli base dists.

    Removed the unused `coord` computation from the original (dead code).
    """
    data_dim = (3, 8, 8)
    x = torch.randn(*data_dim)
    model = models.jiVAE(data_dim[1:], 2, 3, invariances=invariances)
    guide_trace, model_trace = get_enum_traces(model, x)
    assert_(isinstance(model_trace.nodes["latent_cont"]['fn'].base_dist, dist.Normal))
    assert_(isinstance(guide_trace.nodes["latent_cont"]['fn'].base_dist, dist.Normal))
    assert_(isinstance(model_trace.nodes["obs"]['fn'].base_dist, dist.Bernoulli))
@pytest.mark.parametrize("invariances", [None, ['r'], ['s'], ['r', 't', 's'], ['s', 'r', 't']])
def test_jtrvae_disc_sites_fn(invariances):
    """Discrete latent site of jiVAE is a OneHotCategorical distribution.

    Removed the unused `coord` computation from the original (dead code).
    """
    data_dim = (3, 8, 8)
    x = torch.randn(*data_dim)
    model = models.jiVAE(data_dim[1:], 2, 3, invariances=invariances)
    guide_trace, model_trace = get_enum_traces(model, x)
    assert_(isinstance(model_trace.nodes["latent_disc"]['fn'], dist.OneHotCategorical))
    assert_(isinstance(guide_trace.nodes["latent_disc"]['fn'], dist.OneHotCategorical))
@pytest.mark.parametrize("invariances", [None, ['r'], ['s'], ['r', 't', 's'], ['s', 'r', 't']])
def test_sstrvae_cont_sites_dims(invariances):
    """Continuous latent site of ssiVAE has latent_dim + coord columns."""
    data_dim = (3, 8, 8)
    x = torch.randn(data_dim[0], torch.prod(tt(data_dim[1:])).item())
    if invariances is None:
        coord = 0
    else:
        coord = len(invariances)
        if 't' in invariances and len(data_dim[1:]) == 2:
            coord += 1
    model = models.ssiVAE(data_dim[1:], 2, 3, invariances=invariances)
    guide_trace, model_trace = get_enum_traces(model, x)
    expected = (3, data_dim[0], coord + 2)
    assert_equal(model_trace.nodes["z"]['value'].shape, expected)
    assert_equal(guide_trace.nodes["z"]['value'].shape, expected)
    assert_equal(model_trace.nodes["x"]['value'].shape,
                 (data_dim[0], torch.prod(tt(data_dim[1:])).item()))
@pytest.mark.parametrize("invariances", [None, ['r'], ['s'], ['r', 't', 's'], ['s', 'r', 't']])
def test_sstrvae_disc_sites_dims(invariances):
    """Label site of ssiVAE has an (enum, batch, num_classes) shape.

    Removed the unused `coord` computation from the original (dead code).
    """
    data_dim = (3, 8, 8)
    x = torch.randn(data_dim[0], torch.prod(tt(data_dim[1:])).item())
    model = models.ssiVAE(data_dim[1:], 2, 3, invariances=invariances)
    guide_trace, model_trace = get_enum_traces(model, x)
    assert_equal(model_trace.nodes["y"]['value'].shape, (3, data_dim[0], 3))
    assert_equal(guide_trace.nodes["y"]['value'].shape, (3, data_dim[0], 3))
@pytest.mark.parametrize("invariances", [None, ['r'], ['s'], ['r', 't', 's'], ['s', 'r', 't']])
def test_sstrvae_cont_sites_fn(invariances):
    """Continuous latent and data sites of ssiVAE use Normal/Bernoulli dists.

    Removed the unused `coord` computation from the original (dead code).
    """
    data_dim = (3, 8, 8)
    x = torch.randn(data_dim[0], torch.prod(tt(data_dim[1:])).item())
    model = models.ssiVAE(data_dim[1:], 2, 3, invariances=invariances)
    guide_trace, model_trace = get_enum_traces(model, x)
    assert_(isinstance(model_trace.nodes["z"]['fn'].base_dist, dist.Normal))
    assert_(isinstance(guide_trace.nodes["z"]['fn'].base_dist, dist.Normal))
    assert_(isinstance(model_trace.nodes["x"]['fn'].base_dist, dist.Bernoulli))
@pytest.mark.parametrize("invariances", [None, ['r'], ['s'], ['r', 't', 's'], ['s', 'r', 't']])
def test_sstrvae_disc_sites_fn(invariances):
    """Label site of ssiVAE is a OneHotCategorical distribution.

    Removed the unused `coord` computation from the original (dead code).
    """
    data_dim = (3, 8, 8)
    x = torch.randn(data_dim[0], torch.prod(tt(data_dim[1:])).item())
    model = models.ssiVAE(data_dim[1:], 2, 3, invariances=invariances)
    guide_trace, model_trace = get_enum_traces(model, x)
    assert_(isinstance(model_trace.nodes["y"]['fn'], dist.OneHotCategorical))
    assert_(isinstance(guide_trace.nodes["y"]['fn'], dist.OneHotCategorical))
@pytest.mark.parametrize("invariances", [None, ['r'], ['s'], ['r', 't', 's'], ['s', 'r', 't']])
def test_ssregvae_cont_sites_dims(invariances):
    """Continuous latent site of ss_reg_iVAE has latent_dim + coord columns."""
    data_dim = (3, 8, 8)
    x = torch.randn(data_dim[0], torch.prod(tt(data_dim[1:])).item())
    if invariances is None:
        coord = 0
    else:
        coord = len(invariances)
        if 't' in invariances and len(data_dim[1:]) == 2:
            coord += 1
    model = models.ss_reg_iVAE(data_dim[1:], 2, 3, invariances=invariances)
    guide_trace, model_trace = get_traces(model, x)
    expected = (data_dim[0], coord + 2)
    assert_equal(model_trace.nodes["z"]['value'].shape, expected)
    assert_equal(guide_trace.nodes["z"]['value'].shape, expected)
    assert_equal(model_trace.nodes["x"]['value'].shape,
                 (data_dim[0], torch.prod(tt(data_dim[1:])).item()))
@pytest.mark.parametrize("invariances", [None, ['r'], ['s'], ['r', 't', 's'], ['s', 'r', 't']])
def test_ssregvae_disc_sites_dims(invariances):
    """Regression target site of ss_reg_iVAE has (batch, reg_dim) shape.

    Removed the unused `coord` computation from the original (dead code).
    """
    data_dim = (3, 8, 8)
    x = torch.randn(data_dim[0], torch.prod(tt(data_dim[1:])).item())
    model = models.ss_reg_iVAE(data_dim[1:], 2, 3, invariances=invariances)
    guide_trace, model_trace = get_traces(model, x)
    assert_equal(model_trace.nodes["y"]['value'].shape, (data_dim[0], 3))
    assert_equal(guide_trace.nodes["y"]['value'].shape, (data_dim[0], 3))
@pytest.mark.parametrize("invariances", [None, ['r'], ['s'], ['r', 't', 's'], ['s', 'r', 't']])
def test_ssregvae_vae_sites_fn(invariances):
    """VAE sites of ss_reg_iVAE use Normal (latent) and Bernoulli (data) dists.

    Removed the unused `coord` computation from the original (dead code).
    """
    data_dim = (3, 8, 8)
    x = torch.randn(data_dim[0], torch.prod(tt(data_dim[1:])).item())
    model = models.ss_reg_iVAE(data_dim[1:], 2, 3, invariances=invariances)
    guide_trace, model_trace = get_traces(model, x)
    assert_(isinstance(model_trace.nodes["z"]['fn'].base_dist, dist.Normal))
    assert_(isinstance(guide_trace.nodes["z"]['fn'].base_dist, dist.Normal))
    assert_(isinstance(model_trace.nodes["x"]['fn'].base_dist, dist.Bernoulli))
@pytest.mark.parametrize("invariances", [None, ['r'], ['s'], ['r', 't', 's'], ['s', 'r', 't']])
def test_ssregvae_reg_sites_fn(invariances):
    """Regression site of ss_reg_iVAE uses a Normal base distribution.

    Removed the unused `coord` computation from the original (dead code).
    """
    data_dim = (3, 8, 8)
    x = torch.randn(data_dim[0], torch.prod(tt(data_dim[1:])).item())
    model = models.ss_reg_iVAE(data_dim[1:], 2, 3, invariances=invariances)
    guide_trace, model_trace = get_traces(model, x)
    assert_(isinstance(model_trace.nodes["y"]['fn'].base_dist, dist.Normal))
    assert_(isinstance(guide_trace.nodes["y"]['fn'].base_dist, dist.Normal))
@pytest.mark.parametrize(
    "sampler, expected_dist",
    [("gaussian", dist.Normal), ("bernoulli", dist.Bernoulli),
     ("continuous_bernoulli", dist.ContinuousBernoulli)])
def test_trvae_decoder_sampler(sampler, expected_dist):
    """The `sampler_d` choice controls the iVAE observation distribution."""
    data_dim = (2, 8, 8)
    inputs = torch.randn(*data_dim)
    vae = models.iVAE(data_dim[1:], coord=1, sampler_d=sampler)
    _, model_trace = get_traces(vae, inputs)
    assert_(isinstance(model_trace.nodes["obs"]['fn'].base_dist, expected_dist))
@pytest.mark.parametrize(
    "sampler, expected_dist",
    [("gaussian", dist.Normal), ("bernoulli", dist.Bernoulli),
     ("continuous_bernoulli", dist.ContinuousBernoulli)])
def test_ved_decoder_sampler(sampler, expected_dist):
    """The `sampler_d` choice controls the VED observation distribution."""
    in_dim, out_dim = (8, 8), (8,)
    xs = torch.randn(2, 1, *in_dim)
    ys = torch.randn(2, 1, *out_dim)
    ved = models.VED(in_dim, out_dim, sampler_d=sampler)
    _, model_trace = get_traces(ved, xs, ys)
    assert_(isinstance(model_trace.nodes["obs"]['fn'].base_dist, expected_dist))
@pytest.mark.parametrize(
    "sampler, expected_dist",
    [("gaussian", dist.Normal), ("bernoulli", dist.Bernoulli),
     ("continuous_bernoulli", dist.ContinuousBernoulli)])
def test_jtrvae_decoder_sampler(sampler, expected_dist):
    """The `sampler_d` choice controls the jiVAE observation distribution."""
    data_dim = (2, 8, 8)
    inputs = torch.randn(*data_dim)
    vae = models.jiVAE(data_dim[1:], 2, 3, coord=1, sampler_d=sampler)
    _, model_trace = get_enum_traces(vae, inputs)
    assert_(isinstance(model_trace.nodes["obs"]['fn'].base_dist, expected_dist))
@pytest.mark.parametrize(
    "sampler, expected_dist",
    [("gaussian", dist.Normal), ("bernoulli", dist.Bernoulli),
     ("continuous_bernoulli", dist.ContinuousBernoulli)])
def test_sstrvae_decoder_sampler(sampler, expected_dist):
    """The `sampler_d` choice controls the ssiVAE observation distribution."""
    data_dim = (2, 64)
    inputs = torch.randn(*data_dim)
    vae = models.ssiVAE(data_dim[1:], 2, 3, coord=1, sampler_d=sampler)
    _, model_trace = get_enum_traces(vae, inputs)
    assert_(isinstance(model_trace.nodes["x"]['fn'].base_dist, expected_dist))
@pytest.mark.parametrize("data_dim", [(2, 8), (2, 8, 8), (3, 8), (3, 8, 8)])
def test_basevae_encode_x(data_dim):
    """baseVAE._encode returns a concatenated (loc, scale) tensor."""
    x = torch.randn(*data_dim)
    vae = models.base.baseVAE(data_dim[1:], None)
    vae.set_encoder(nets.fcEncoderNet(data_dim[1:], 2, 0))
    encoded = vae._encode(x)
    batch = data_dim[0]
    assert_equal(encoded[:, :2].shape, (batch, 2))
    assert_equal(encoded[:, 2:].shape, (batch, 2))
def test_basevae_encode_xy():
    """baseVAE._encode also accepts a one-hot label alongside the data."""
    data_dim = (2, 64)
    x = torch.randn(*data_dim)
    # uniform class probabilities over 3 classes -> one-hot labels
    alpha = torch.ones(data_dim[0], 3) / 3
    labels = dist.OneHotCategorical(alpha).sample()
    vae = models.base.baseVAE(data_dim[1:], None)
    vae.set_encoder(nets.fcEncoderNet(data_dim[1:], 2, 3))
    encoded = vae._encode(x, labels)
    assert_equal(encoded[:, :2].shape, (data_dim[0], 2))
    assert_equal(encoded[:, 2:].shape, (data_dim[0], 2))
@pytest.mark.parametrize("invariances", [None, ['r'], ['s'], ['r', 't', 's']])
def test_basevae_decode_x(invariances):
    """baseVAE._decode reconstructs data of the original dimensionality."""
    data_dim = (3, 8, 8)
    if invariances is None:
        coord = 0
    else:
        coord = len(invariances)
        if 't' in invariances and len(data_dim[1:]) == 2:
            coord += 1
    z = torch.randn(data_dim[0], 2)
    vae = models.base.baseVAE(data_dim[1:], invariances)
    vae.coord = coord
    vae.grid = utils.generate_grid(data_dim[1:]).to(vae.device)
    # spatial decoder only when coordinate transforms are in play
    dnet = nets.sDecoderNet if 0 < coord < 5 else nets.fcDecoderNet
    vae.set_decoder(dnet(data_dim[1:], 2))
    decoded = vae._decode(z)
    assert_equal(decoded.squeeze().shape, data_dim)
@pytest.mark.parametrize("vae_model", [models.jiVAE, models.ssiVAE])
@pytest.mark.parametrize("invariances", [None, ['r'], ['s'], ['r', 't', 's']])
def test_jsstrvae_decode(vae_model, invariances):
    """decode() of joint/semi-supervised iVAE returns data-shaped output."""
    data_dim = (38, 8)
    model = vae_model(data_dim, 2, 3, invariances=invariances)
    z = torch.tensor([0.0, 0.0]).unsqueeze(0)
    label = utils.to_onehot(torch.tensor(0).unsqueeze(0), 3)
    decoded = model.decode(z, label)
    assert_equal(decoded.squeeze().shape, data_dim)
@pytest.mark.parametrize("invariances", [None, ['r'], ['s'], ['r', 't', 's']])
def test_trvae_decode_2d(invariances):
    """iVAE.decode maps a single latent point back to a 2D image."""
    data_dim = (8, 8)
    vae = models.iVAE(data_dim, invariances=invariances)
    z = torch.tensor([0.0, 0.0]).unsqueeze(0)
    assert_equal(vae.decode(z).squeeze().shape, data_dim)
@pytest.mark.parametrize("invariances", [None, ['t']])
def test_trvae_decode_1d(invariances):
    """iVAE.decode maps a single latent point back to a 1D signal."""
    data_dim = (8,)
    vae = models.iVAE(data_dim, invariances=invariances)
    z = torch.tensor([0.0, 0.0]).unsqueeze(0)
    assert_equal(vae.decode(z).squeeze().shape, data_dim)
@pytest.mark.parametrize("input_dim, output_dim",
                         [((8,), (8, 8)), ((8, 8), (8,)),
                          ((8,), (8,)), ((8, 8), (8, 8))])
def test_ved_decode(input_dim, output_dim):
    """VED.decode maps a latent point to output-dim data."""
    z = torch.tensor([0.0, 0.0]).unsqueeze(0)
    ved = models.VED(input_dim, output_dim)
    assert_equal(ved.decode(z).squeeze().shape, output_dim)
@pytest.mark.parametrize("input_dim, output_dim",
                         [((8,), (8, 8)), ((8, 8), (8,)),
                          ((8,), (8,)), ((8, 8), (8, 8))])
def test_ved_predict(input_dim, output_dim):
    """VED.predict maps a batch of inputs to output-dim predictions."""
    xs = torch.randn(2, 1, *input_dim)
    ved = models.VED(input_dim, output_dim)
    prediction, _ = ved.predict(xs)
    assert_equal(prediction.squeeze().shape, (2, *output_dim))
@pytest.mark.parametrize("invariances", [None, ['r'], ['s'], ['r', 't', 's']])
def test_ctrvae_decode(invariances):
    """Class-conditioned iVAE.decode accepts a latent point plus a label."""
    data_dim = (8, 8)
    vae = models.iVAE(data_dim, c_dim=3, invariances=invariances)
    z = torch.tensor([0.0, 0.0]).unsqueeze(0)
    label = utils.to_onehot(torch.tensor(0).unsqueeze(0), 3)
    assert_equal(vae.decode(z, label).squeeze().shape, data_dim)
@pytest.mark.parametrize("invariances", [None, ['r'], ['s'], ['t'], ['r', 't', 's']])
def test_trvae_encode_2d(invariances):
    """2D iVAE.encode returns loc/scale with latent_dim + coord columns."""
    data_dim = (3, 8, 8)
    x = torch.randn(*data_dim)
    if invariances is None:
        coord = 0
    else:
        coord = len(invariances)
        if 't' in invariances and len(data_dim[1:]) == 2:
            coord += 1
    vae = models.iVAE(data_dim[1:], 2, invariances=invariances)
    encoded = vae.encode(x)
    assert_equal(encoded[0].shape, (data_dim[0], coord + 2))
    assert_equal(encoded[0].shape, encoded[1].shape)
@pytest.mark.parametrize("invariances", [None, ['t']])
def test_trvae_encode_1d(invariances):
    """1D iVAE.encode returns loc/scale with latent_dim + coord columns."""
    data_dim = (3, 8)
    x = torch.randn(*data_dim)
    coord = len(invariances) if invariances is not None else 0
    vae = models.iVAE(data_dim[1:], 2, invariances=invariances)
    encoded = vae.encode(x)
    assert_equal(encoded[0].shape, (data_dim[0], coord + 2))
    assert_equal(encoded[0].shape, encoded[1].shape)
@pytest.mark.parametrize("input_dim, output_dim",
                         [((8,), (8, 8)), ((8, 8), (8,)),
                          ((8,), (8,)), ((8, 8), (8, 8))])
def test_ved_encode(input_dim, output_dim):
    """VED.encode returns loc/scale pairs of matching shape."""
    xs = torch.randn(2, 1, *input_dim)
    ved = models.VED(input_dim, output_dim)
    encoded = ved.encode(xs)
    assert_equal(encoded[0].shape, (xs.shape[0], 2))
    assert_equal(encoded[0].shape, encoded[1].shape)
@pytest.mark.parametrize("invariances", [None, ['r'], ['s'], ['t'], ['r', 't', 's']])
def test_jtrvae_encode(invariances):
    """jiVAE.encode returns (loc, scale, labels) of consistent shapes."""
    data_dim = (3, 8, 8)
    x = torch.randn(*data_dim)
    if invariances is None:
        coord = 0
    else:
        coord = len(invariances)
        if 't' in invariances:
            coord += 1
    model = models.jiVAE(data_dim[1:], 2, 3, invariances=invariances)
    encoded = model.encode(x)
    assert_equal(encoded[0].shape, encoded[1].shape)
    assert_equal(encoded[0].shape, (data_dim[0], coord + 2))
    assert_equal(encoded[2].shape, (data_dim[0],))
@pytest.mark.parametrize("invariances", [None, ['r'], ['s'], ['t'], ['r', 't', 's']])
def test_sstrvae_encode(invariances):
    """ssiVAE.encode returns (loc, scale, labels) of consistent shapes."""
    data_dim = (3, 8, 8)
    x = torch.randn(data_dim[0], torch.prod(tt(data_dim[1:])).item())
    if invariances is None:
        coord = 0
    else:
        coord = len(invariances)
        if 't' in invariances:
            coord += 1
    model = models.ssiVAE(data_dim[1:], 2, 5, invariances=invariances)
    encoded = model.encode(x)
    assert_equal(encoded[0].shape, encoded[1].shape)
    assert_equal(encoded[0].shape, (data_dim[0], coord + 2))
    assert_equal(encoded[2].shape, (data_dim[0],))
@pytest.mark.parametrize("num_classes", [0, 2, 3])
@pytest.mark.parametrize("invariances", [None, ['r'], ['s'], ['t'], ['r', 't', 's']])
def test_trvae_manifold2d(invariances, num_classes):
    """iVAE.manifold2d returns a d*d grid of decoded images."""
    data_dim = (8, 8)
    vae = models.iVAE(data_dim, c_dim=num_classes, invariances=invariances)
    label = None
    if num_classes > 0:
        label = utils.to_onehot(torch.tensor(0).unsqueeze(0), num_classes)
    grid = vae.manifold2d(4, label, plot=True)
    assert_equal(grid.squeeze().shape, (16, *data_dim))
@pytest.mark.parametrize("input_dim, output_dim",
                         [((8,), (8, 8)), ((8, 8), (8,)),
                          ((8,), (8,)), ((8, 8), (8, 8))])
def test_ved_manifold2d(input_dim, output_dim):
    """VED.manifold2d returns a d*d grid of decoded outputs."""
    ved = models.VED(input_dim, output_dim)
    grid = ved.manifold2d(4, plot=True)
    assert_equal(grid.squeeze().shape, (16, *output_dim))
@pytest.mark.parametrize("vae_model", [models.jiVAE, models.ssiVAE])
@pytest.mark.parametrize("invariances", [None, ['r'], ['s'], ['t'], ['r', 't', 's']])
def test_jsstrvae_manifold2d(vae_model, invariances):
    """manifold2d of joint/semi-supervised iVAE returns a d*d grid."""
    data_dim = (8, 8)
    model = vae_model(data_dim, 2, 3, invariances=invariances)
    grid = model.manifold2d(4, plot=True)
    assert_equal(grid.squeeze().shape, (16, *data_dim))
@pytest.mark.parametrize("invariances", [None, ['r'], ['s'], ['t'], ['r', 't', 's']])
def test_save_load_basevae(invariances):
    """Weights saved to disk and loaded back are identical to the originals.

    Fixes from the original:
    - dropped the spurious ``@pytest.fixture(scope='session')`` decorator:
      a fixture-decorated function is not collected as a test, so this
      test never actually ran;
    - ``baseVAE()`` was called without arguments, while every other test
      constructs it as ``baseVAE(data_dim, invariances)``;
    - the decoder was built with the full ``data_dim`` (including the batch
      axis) instead of ``data_dim[1:]`` as the encoder is.
    """
    data_dim = (5, 8, 8)
    coord = 0
    if invariances is not None:
        coord = len(invariances)
        if 't' in invariances:
            coord = coord + 1
    vae = models.base.baseVAE(data_dim[1:], invariances)
    encoder_net = nets.fcEncoderNet(data_dim[1:], 2 + coord, 0)
    dnet = nets.sDecoderNet if 0 < coord < 5 else nets.fcDecoderNet
    decoder_net = dnet(data_dim[1:], 2, 0)
    vae.set_encoder(encoder_net)
    vae.set_decoder(decoder_net)
    weights_init = dc(vae.state_dict())
    vae.save_weights("my_weights")
    vae.load_weights("my_weights.pt")
    weights_loaded = vae.state_dict()
    assert_(assert_weights_equal(weights_loaded, weights_init))
|
{"/pyroved/models/__init__.py": ["/pyroved/models/ivae.py", "/pyroved/models/ssivae.py", "/pyroved/models/ss_reg_ivae.py", "/pyroved/models/jivae.py", "/pyroved/models/ved.py"], "/pyroved/models/jivae.py": ["/pyroved/nets/__init__.py", "/pyroved/models/base.py"], "/pyroved/models/ved.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/nets/__init__.py": ["/pyroved/nets/conv.py", "/pyroved/nets/fc.py"], "/pyroved/models/ivae.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/models/ssivae.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/models/ss_reg_ivae.py": ["/pyroved/nets/__init__.py", "/pyroved/models/base.py"]}
|
20,213
|
matthewcarbone/pyroVED
|
refs/heads/main
|
/pyroved/models/ss_reg_ivae.py
|
"""
ss_reg_ivae.py
==============
Variational autoencoder for semi-supervised regression
with an option to enforce orientational, positional and scale
invariances
Created by Maxim Ziatdinov (email: ziatdinovmax@gmail.com)
"""
from typing import List, Optional, Tuple, Type, Union
import pyro
import pyro.distributions as dist
import torch
from ..nets import fcDecoderNet, fcEncoderNet, fcRegressorNet, sDecoderNet
from ..utils import (generate_latent_grid, get_sampler, init_dataloader,
plot_img_grid, plot_spect_grid, set_deterministic_mode,
transform_coordinates)
from .base import baseVAE
class ss_reg_iVAE(baseVAE):
    """
    Semi-supervised variational autoencoder for regression tasks
    with the enforcement of rotational, translational, and scale invariances.

    Args:
        data_dim:
            Dimensionality of the input data; use (h x w) for images
            or (length,) for spectra.
        latent_dim:
            Number of latent dimensions.
        reg_dim:
            Number of output dimensions in regression. For example,
            for a single output regressor, specify reg_dim=1.
        invariances:
            List with invariances to enforce. For 2D systems, `r` enforces
            rotational invariance, `t` enforces invariance to
            translations, `sc` enforces a scale invariance, and
            invariances=None corresponds to vanilla VAE.
            For 1D systems, 't' enforces translational invariance and
            invariances=None is vanilla VAE
        hidden_dim_e:
            Number of hidden units per each layer in encoder (inference network).
        hidden_dim_d:
            Number of hidden units per each layer in decoder (generator network).
        hidden_dim_cls:
            Number of hidden units ("neurons") in each layer of classifier
        num_layers_e:
            Number of layers in encoder (inference network).
        num_layers_d:
            Number of layers in decoder (generator network).
        num_layers_cls:
            Number of layers in classifier
        activation:
            Non-linear activation for inner layers of both encoder and the decoder.
            The available activations are ReLU ('relu'), leaky ReLU ('lrelu'),
            hyperbolic tangent ('tanh'), softplus ('softplus'), and GELU ('gelu').
            (The default is "tanh").
        sampler_d:
            Decoder sampler, as defined as p(x|z) = sampler(decoder(z)).
            The available samplers are 'bernoulli', 'continuous_bernoulli',
            and 'gaussian' (Default: 'bernoulli').
        sigmoid_d:
            Sigmoid activation for the decoder output (Default: True)
        seed:
            Seed used in torch.manual_seed(seed) and
            torch.cuda.manual_seed_all(seed)

    Keyword Args:
        device:
            Sets device to which model and data will be moved.
            Defaults to 'cuda:0' if a GPU is available and to CPU otherwise.
        dx_prior:
            Translational prior in x direction (float between 0 and 1)
        dy_prior:
            Translational prior in y direction (float between 0 and 1)
        sc_prior:
            Scale prior (usually, sc_prior << 1)
        decoder_sig:
            Sets sigma for a "gaussian" decoder sampler
        regressor_sig:
            Sets sigma for a regression sampler

    Examples:
        Initialize a VAE model with rotational invariance for
        a semi-supervised single-output regression.

        >>> data_dim = (28, 28)
        >>> ssvae = ss_reg_iVAE(data_dim, latent_dim=2, reg_dim=1, invariances=['r'])
    """

    def __init__(self,
                 data_dim: Tuple[int],
                 latent_dim: int,
                 reg_dim: int,
                 invariances: List[str] = None,
                 hidden_dim_e: int = 128,
                 hidden_dim_d: int = 128,
                 hidden_dim_cls: int = 128,
                 num_layers_e: int = 2,
                 num_layers_d: int = 2,
                 num_layers_cls: int = 2,
                 activation: str = "tanh",
                 sampler_d: str = "bernoulli",
                 sigmoid_d: bool = True,
                 seed: int = 1,
                 **kwargs: Union[str, float]
                 ) -> None:
        """
        Initializes ss_reg_iVAE parameters
        """
        args = (data_dim, invariances)
        super(ss_reg_iVAE, self).__init__(*args, **kwargs)
        pyro.clear_param_store()
        set_deterministic_mode(seed)
        self.data_dim = data_dim
        # Initialize z-Encoder neural network; its output has
        # latent_dim + self.coord dims (extra dims for invariance params)
        self.encoder_z = fcEncoderNet(
            data_dim, latent_dim+self.coord, reg_dim,
            hidden_dim_e, num_layers_e, activation, flat=False)
        # Initialize y-Encoder (regressor) neural network
        self.encoder_y = fcRegressorNet(
            data_dim, reg_dim, hidden_dim_cls, num_layers_cls,
            activation)
        # Initializes Decoder neural network; a "spatial" decoder is used
        # whenever coordinate transformations are enforced (coord > 0)
        dnet = sDecoderNet if 0 < self.coord < 5 else fcDecoderNet
        self.decoder = dnet(
            data_dim, latent_dim, reg_dim, hidden_dim_d,
            num_layers_d, activation, sigmoid_out=sigmoid_d,
            unflat=False)
        self.sampler_d = get_sampler(sampler_d, **kwargs)
        # Set sigma for regression sampler
        self.reg_sig = kwargs.get("regressor_sig", 0.5)
        # Sets continuous and discrete dimensions
        self.z_dim = latent_dim + self.coord
        self.reg_dim = reg_dim
        # Send model parameters to their appropriate devices.
        self.to(self.device)

    def model(self,
              xs: torch.Tensor,
              ys: Optional[torch.Tensor] = None,
              **kwargs: float) -> None:
        """
        Model of the generative process p(x|z,y)p(y)p(z)
        """
        pyro.module("ss_vae", self)
        batch_dim = xs.size(0)
        specs = dict(dtype=xs.dtype, device=xs.device)
        # KL scale factor (beta) for the latent term
        beta = kwargs.get("scale_factor", 1.)
        # pyro.plate enforces independence between variables in batches xs, ys
        with pyro.plate("data"):
            # sample the latent vector from the constant prior distribution
            prior_loc = torch.zeros(batch_dim, self.z_dim, **specs)
            prior_scale = torch.ones(batch_dim, self.z_dim, **specs)
            with pyro.poutine.scale(scale=beta):
                zs = pyro.sample(
                    "z", dist.Normal(prior_loc, prior_scale).to_event(1))
            # split latent variable into parts for rotation and/or translation
            # and image content
            if self.coord > 0:
                phi, dx, sc, zs = self.split_latent(zs)
                if 't' in self.invariances:
                    dx = (dx * self.t_prior).unsqueeze(1)
                # transform coordinate grid
                grid = self.grid.expand(zs.shape[0], *self.grid.shape)
                x_coord_prime = transform_coordinates(grid, phi, dx, sc)
            # sample label from the constant prior or observe the value
            c_prior = (torch.zeros(batch_dim, self.reg_dim, **specs))
            ys = pyro.sample(
                "y", dist.Normal(c_prior, self.reg_sig).to_event(1), obs=ys)
            # Score against the parametrized distribution
            # p(x|y,z) = bernoulli(decoder(y,z))
            d_args = (x_coord_prime, [zs, ys]) if self.coord else ([zs, ys],)
            loc = self.decoder(*d_args)
            loc = loc.view(*ys.shape[:-1], -1)
            pyro.sample("x", self.sampler_d(loc).to_event(1), obs=xs)

    def guide(self, xs: torch.Tensor,
              ys: Optional[torch.Tensor] = None,
              **kwargs: float) -> None:
        """
        Guide q(z|y,x)q(y|x)
        """
        beta = kwargs.get("scale_factor", 1.)
        with pyro.plate("data"):
            # sample and score the continuous target with the variational
            # distribution q(y|x) = normal(regressor(x), reg_sig)
            if ys is None:
                c = self.encoder_y(xs)
                ys = pyro.sample("y", dist.Normal(c, self.reg_sig).to_event(1))
            # sample (and score) the latent vector with the variational
            # distribution q(z|x,y) = normal(loc(x,y),scale(x,y))
            loc, scale = self.encoder_z([xs, ys])
            with pyro.poutine.scale(scale=beta):
                pyro.sample("z", dist.Normal(loc, scale).to_event(1))

    def split_latent(self, zs: torch.Tensor) -> Tuple[torch.Tensor]:
        """
        Split latent variable into parts with rotation and/or translation
        and image content
        """
        zdims = list(zs.shape)
        zdims[-1] = zdims[-1] - self.coord
        zs = zs.view(-1, zs.size(-1))
        # For 1D, there is only translation
        phi, dx, sc, zs = self._split_latent(zs)
        return phi, dx, sc, zs.view(*zdims)

    def model_aux(self, xs: torch.Tensor,
                  ys: Optional[torch.Tensor] = None,
                  **kwargs: float) -> None:
        """
        Models an auxiliary (supervised) loss
        """
        pyro.module("ss_vae", self)
        with pyro.plate("data"):
            # the extra term to yield an auxiliary loss
            aux_loss_multiplier = kwargs.get("aux_loss_multiplier", 20)
            if ys is not None:
                c = self.encoder_y.forward(xs)
                with pyro.poutine.scale(scale=aux_loss_multiplier):
                    pyro.sample(
                        "y_aux", dist.Normal(c, self.reg_sig).to_event(1), obs=ys)

    def guide_aux(self, xs, ys=None, **kwargs):
        """
        Dummy guide function to accompany model_aux
        """
        pass

    def set_regressor(self, reg_net: Type[torch.nn.Module]) -> None:
        """
        Sets a user-defined regression network
        """
        self.encoder_y = reg_net

    def regressor(self,
                  x_new: torch.Tensor,
                  **kwargs: int) -> torch.Tensor:
        """
        Applies trained regressor to new data

        Args:
            x_new:
                Input data for the regressor part of trained ss-reg-VAE.
                The new data must have the same dimensions
                (images height x width or spectra length) as the one used
                for training.
            kwargs:
                Batch size as 'batch_size' (for encoding large volumes of data)
        """
        def regress(x_i) -> torch.Tensor:
            # no-grad inference on a single mini-batch; result moved to CPU
            with torch.no_grad():
                predicted = self.encoder_y(x_i)
            return predicted.cpu()

        x_new = init_dataloader(x_new, shuffle=False, **kwargs)
        y_predicted = []
        for (x_i,) in x_new:
            y_predicted.append(regress(x_i.to(self.device)))
        return torch.cat(y_predicted)

    def encode(self,
               x_new: torch.Tensor,
               y: Optional[torch.Tensor] = None,
               **kwargs: int) -> torch.Tensor:
        """
        Encodes data using a trained inference (encoder) network

        Args:
            x_new:
                Data to encode. The new data must have
                the same dimensions (images height and width or spectra length)
                as the one used for training.
            y:
                Vector with a continuous variable(s) for each sample in x_new.
                If not provided, the ss-reg-iVAE's regressor will be used to obtain it.
            kwargs:
                Batch size as 'batch_size' (for encoding large volumes of data)
        """
        if y is None:
            y = self.regressor(x_new, **kwargs)
        z = self._encode(x_new, y, **kwargs)
        # _encode returns loc and scale concatenated along dim 1
        z_loc, z_scale = z.split(self.z_dim, 1)
        return z_loc, z_scale, y

    def decode(self, z: torch.Tensor, y: torch.Tensor, **kwargs: int) -> torch.Tensor:
        """
        Decodes a batch of latent coordinates

        Args:
            z: Latent coordinates (without rotational and translational parts)
            y: Vector with continuous variable(s) for each sample in z
            kwargs: Batch size as 'batch_size'
        """
        z = torch.cat([z.to(self.device), y.to(self.device)], -1)
        loc = self._decode(z, **kwargs)
        return loc.view(-1, *self.data_dim)

    def manifold2d(self, d: int, y: torch.Tensor, plot: bool = True,
                   **kwargs: Union[str, int, float]) -> torch.Tensor:
        """
        Returns a learned latent manifold in the image space

        Args:
            d: Grid size
            y: Conditional vector
            plot: Plots the generated manifold (Default: True)
            kwargs: Keyword arguments include custom min/max values
                    for grid boundaries passed as 'z_coord'
                    (e.g. z_coord = [-3, 3, -3, 3]), 'angle' and
                    'shift' to condition a generative model on, and plot parameters
                    ('padding', 'padding_value', 'cmap', 'origin', 'ylim')
        """
        z, (grid_x, grid_y) = generate_latent_grid(d, **kwargs)
        # broadcast the conditional vector to every grid point
        y = y.unsqueeze(1) if 0 < y.ndim < 2 else y
        y = y.expand(z.shape[0], *y.shape[1:])
        loc = self.decode(z, y, **kwargs)
        if plot:
            if self.ndim == 2:
                plot_img_grid(
                    loc, d,
                    extent=[grid_x.min(), grid_x.max(), grid_y.min(), grid_y.max()],
                    **kwargs)
            elif self.ndim == 1:
                plot_spect_grid(loc, d, **kwargs)
        return loc
|
{"/pyroved/models/__init__.py": ["/pyroved/models/ivae.py", "/pyroved/models/ssivae.py", "/pyroved/models/ss_reg_ivae.py", "/pyroved/models/jivae.py", "/pyroved/models/ved.py"], "/pyroved/models/jivae.py": ["/pyroved/nets/__init__.py", "/pyroved/models/base.py"], "/pyroved/models/ved.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/nets/__init__.py": ["/pyroved/nets/conv.py", "/pyroved/nets/fc.py"], "/pyroved/models/ivae.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/models/ssivae.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/models/ss_reg_ivae.py": ["/pyroved/nets/__init__.py", "/pyroved/models/base.py"]}
|
20,214
|
matthewcarbone/pyroVED
|
refs/heads/main
|
/pyroved/trainers/svi.py
|
from typing import Type, Optional, Union
import torch
import pyro
import pyro.infer as infer
import pyro.optim as optim
from ..utils import set_deterministic_mode
class SVItrainer:
    """
    Stochastic variational inference (SVI) trainer for
    unsupervised and class-conditioned VED models consisting
    of one encoder and one decoder.

    Args:
        model:
            Initialized model. Must be a subclass of torch.nn.Module
            and have self.model and self.guide methods
        optimizer:
            Pyro optimizer (Defaults to Adam with learning rate 1e-3)
        loss:
            ELBO objective (Defaults to pyro.infer.Trace_ELBO)
        enumerate_parallel:
            Exact discrete enumeration for discrete latent variables
        seed:
            Enforces reproducibility

    Keyword Args:
        lr: learning rate (Default: 1e-3)
        device:
            Sets device to which model and data will be moved.
            Defaults to 'cuda:0' if a GPU is available and to CPU otherwise.

    Examples:
        Train a model with SVI trainer using default settings

        >>> # Initialize model
        >>> data_dim = (28, 28)
        >>> trvae = pyroved.models.iVAE(data_dim, latent_dim=2, invariances=['r', 't'])
        >>> # Initialize SVI trainer
        >>> trainer = SVItrainer(trvae)
        >>> # Train for 200 epochs:
        >>> for _ in range(200):
        >>>     trainer.step(train_loader)
        >>>     trainer.print_statistics()

        Train a model with SVI trainer with a "time"-dependent KL scaling factor

        >>> # Initialize model
        >>> data_dim = (28, 28)
        >>> rvae = pyroved.models.iVAE(data_dim, latent_dim=2, invariances=['r'])
        >>> # Initialize SVI trainer
        >>> trainer = SVItrainer(rvae)
        >>> kl_scale = torch.linspace(1, 4, 50)  # ramp-up KL scale factor from 1 to 4 during first 50 epochs
        >>> # Train
        >>> for e in range(100):
        >>>     sc = kl_scale[e] if e < len(kl_scale) else kl_scale[-1]
        >>>     trainer.step(train_loader, scale_factor=sc)
        >>>     trainer.print_statistics()
    """
    def __init__(self,
                 model: Type[torch.nn.Module],
                 optimizer: Type[optim.PyroOptim] = None,
                 loss: Type[infer.ELBO] = None,
                 enumerate_parallel: bool = False,
                 seed: int = 1,
                 **kwargs: Union[str, float]
                 ) -> None:
        """
        Initializes the trainer's parameters
        """
        pyro.clear_param_store()
        set_deterministic_mode(seed)
        self.device = kwargs.get(
            "device", 'cuda' if torch.cuda.is_available() else 'cpu')
        if optimizer is None:
            lr = kwargs.get("lr", 1e-3)
            optimizer = optim.Adam({"lr": lr})
        if loss is None:
            if enumerate_parallel:
                # enumerate discrete latent variables exactly and in parallel
                loss = infer.TraceEnum_ELBO(
                    max_plate_nesting=1, strict_enumeration_warning=False)
            else:
                loss = infer.Trace_ELBO()
        guide = model.guide
        if enumerate_parallel:
            guide = infer.config_enumerate(guide, "parallel", expand=True)
        self.svi = infer.SVI(model.model, guide, optimizer, loss=loss)
        self.loss_history = {"training_loss": [], "test_loss": []}
        self.current_epoch = 0

    def train(self,
              train_loader: Type[torch.utils.data.DataLoader],
              **kwargs: float) -> float:
        """
        Trains a single epoch and returns the average per-sample loss.
        """
        # initialize loss accumulator
        epoch_loss = 0.
        # do a training epoch over each mini-batch returned by the data loader
        for data in train_loader:
            if len(data) == 1:  # VAE mode
                x = data[0]
                loss = self.svi.step(x.to(self.device), **kwargs)
            else:  # VED or cVAE mode
                x, y = data
                loss = self.svi.step(
                    x.to(self.device), y.to(self.device), **kwargs)
            # do ELBO gradient and accumulate loss
            epoch_loss += loss
        return epoch_loss / len(train_loader.dataset)

    def evaluate(self,
                 test_loader: Type[torch.utils.data.DataLoader],
                 **kwargs: float) -> float:
        """
        Evaluates current model state on a single epoch and returns the
        average per-sample test loss.

        Bug fix: the original called ``self.svi.step`` here, which performs a
        gradient step and therefore *updates model parameters on the test
        data*. ``SVI.evaluate_loss`` computes the ELBO estimate without
        taking a gradient step.
        """
        # initialize loss accumulator
        test_loss = 0.
        # compute the loss over the entire test set
        with torch.no_grad():
            for data in test_loader:
                if len(data) == 1:  # VAE mode
                    x = data[0]
                    loss = self.svi.evaluate_loss(x.to(self.device), **kwargs)
                else:  # VED or cVAE mode
                    x, y = data
                    loss = self.svi.evaluate_loss(
                        x.to(self.device), y.to(self.device), **kwargs)
                test_loss += loss
        return test_loss / len(test_loader.dataset)

    def step(self,
             train_loader: Type[torch.utils.data.DataLoader],
             test_loader: Optional[Type[torch.utils.data.DataLoader]] = None,
             **kwargs: float) -> None:
        """
        Single training and (optionally) evaluation step

        Args:
            train_loader:
                Pytorch’s dataloader object with training data
            test_loader:
                (Optional) Pytorch’s dataloader object with test data

        Keyword Args:
            scale_factor:
                Scale factor for KL divergence. See e.g. https://arxiv.org/abs/1804.03599
                Default value is 1 (i.e. no scaling)
        """
        train_loss = self.train(train_loader, **kwargs)
        self.loss_history["training_loss"].append(train_loss)
        if test_loader is not None:
            test_loss = self.evaluate(test_loader, **kwargs)
            self.loss_history["test_loss"].append(test_loss)
        self.current_epoch += 1

    def print_statistics(self) -> None:
        """
        Prints training and test (if any) losses for current epoch
        """
        e = self.current_epoch
        if len(self.loss_history["test_loss"]) > 0:
            template = 'Epoch: {} Training loss: {:.4f}, Test loss: {:.4f}'
            print(template.format(e, self.loss_history["training_loss"][-1],
                                  self.loss_history["test_loss"][-1]))
        else:
            template = 'Epoch: {} Training loss: {:.4f}'
            print(template.format(e, self.loss_history["training_loss"][-1]))
|
{"/pyroved/models/__init__.py": ["/pyroved/models/ivae.py", "/pyroved/models/ssivae.py", "/pyroved/models/ss_reg_ivae.py", "/pyroved/models/jivae.py", "/pyroved/models/ved.py"], "/pyroved/models/jivae.py": ["/pyroved/nets/__init__.py", "/pyroved/models/base.py"], "/pyroved/models/ved.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/nets/__init__.py": ["/pyroved/nets/conv.py", "/pyroved/nets/fc.py"], "/pyroved/models/ivae.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/models/ssivae.py": ["/pyroved/models/base.py", "/pyroved/nets/__init__.py"], "/pyroved/models/ss_reg_ivae.py": ["/pyroved/nets/__init__.py", "/pyroved/models/base.py"]}
|
20,215
|
NickJacksonDev/DnD-Manager
|
refs/heads/master
|
/Character_Builder/models.py
|
from django.db import models
from django.contrib.auth.models import User
from django.urls import reverse
from datetime import datetime
# Notes (Django models)
# Each model acts more or less like a database table
# Each model's field acts like a column in said table
# Foreign Keys act as a thing that links a class
# to a parent class that uses it.
# eg: abilityScore's "characterID" is a foreign key
# https://docs.djangoproject.com/en/2.1/topics/db/models/
# can do fieldName = ___Field(blank = true) to make this field optional
# can do fieldName = ___Field(choices = LIST_NAME) to make it have a dropdown to the choices given
# can do from geography.models import ZipCode
# Constants
# Should I move this next to the class that uses it?
MAX_LENGTH_CHARACTER_NAME = 255
MAX_LENGTH_ALIGNMENT = 255
MAX_LENGTH_SIZE = 255
DEFAULT_LEVEL = 0
DEFAULT_XP = 0
DEFAULT_HP = 6
MAX_LENGTH_ABILITY_NAME = 255
MAX_LENGTH_CLASS_NAME = 255
MAX_LENGTH_HIT_DICE = 255
MAX_LENGTH_RACE_NAME = 255
DEFAULT_ABILITY_SCORE = 10
DEFAULT_ABILITY_SCORE_BONUS = 0
DEFAULT_DATETIME = datetime.min
# Description of this model file
# Much of this will be based off of the database schemas
# As this is in the character builder folder, this will focus on
# the character information
# finds a default user
def defaultUser():
    """Return an existing User, creating a placeholder account if the
    user table is empty (so ForeignKey defaults always resolve)."""
    user = User.objects.first()
    if user is not None:
        return user
    return User.objects.create_user(
        'defaultUser', password='djangoproject', last_login=DEFAULT_DATETIME)
# Sets default race to human
def defaultRace():
    """Return the primary key of the first CharacterRace, seeding the
    table with a standard Human (+1 to every ability) when empty."""
    race = CharacterRace.objects.first()
    if race is None:
        race = CharacterRace(
            raceName='Human',
            speed=30,
            size='Medium',
            strengthBonus=1,
            dexterityBonus=1,
            constitutionBonus=1,
            intelligenceBonus=1,
            wisdomBonus=1,
            charismaBonus=1,
        )
        race.save()
    # Returns the primary key, not the race itself
    return race.raceID
# Sets default class to fighter
def defaultClass():
    """Return the primary key of the first CharacterClass, seeding the
    table with a Fighter (d8 hit dice) when empty."""
    char_class = CharacterClass.objects.first()
    if char_class is None:
        char_class = CharacterClass(className='Fighter', hitDice='d8')
        char_class.save()
    # CharacterClass names its primary-key field 'characterID'.
    return char_class.characterID
# This class is largely static, like a lookup table
# Note: because the character has a key to this, it must
# be above the Character class
class CharacterRace(models.Model):
    """Static lookup table of playable races: movement speed, size, and
    the fixed bonus each race grants to the six core ability scores."""
    raceID = models.AutoField(primary_key=True)
    raceName = models.CharField(max_length = MAX_LENGTH_RACE_NAME)
    speed = models.IntegerField()
    size = models.CharField(max_length = MAX_LENGTH_SIZE) # Okay to overload?
    # The six core ability-score bonuses are hard-coded as individual
    # columns (default 0) rather than a separate bonus-set table.
    strengthBonus = models.IntegerField(default=DEFAULT_ABILITY_SCORE_BONUS)
    dexterityBonus = models.IntegerField(default=DEFAULT_ABILITY_SCORE_BONUS)
    constitutionBonus = models.IntegerField(default=DEFAULT_ABILITY_SCORE_BONUS)
    intelligenceBonus = models.IntegerField(default=DEFAULT_ABILITY_SCORE_BONUS)
    wisdomBonus = models.IntegerField(default=DEFAULT_ABILITY_SCORE_BONUS)
    charismaBonus = models.IntegerField(default=DEFAULT_ABILITY_SCORE_BONUS)
    # Outdated code
    # abilityScoreBonusSetID = models.IntegerField() # Same level of abstraction?
    # character = models.ForeignKey(Character, on_delete=models.CASCADE, null=True)
    def __str__(self):
        # Human-readable label used by the admin and shell output.
        return self.raceName
# This class is largely static, like a lookup table
class CharacterClass(models.Model):
    """Static lookup table of character classes and their hit dice."""
    # TODO: Maybe use ManyToMany relationship, as one character may have multiple
    # classes... Oh wait. That's actually something to consider...
    # character = models.ForeignKey(Character, on_delete=models.CASCADE, null=True, blank=True)
    # NOTE: the primary key is named 'characterID' even though it
    # identifies a class, not a character.
    characterID = models.AutoField(primary_key=True)
    className = models.CharField(max_length = MAX_LENGTH_CLASS_NAME)
    # Hit dice stored as a display string, e.g. 'd8' or 'd10'.
    hitDice = models.CharField(max_length = MAX_LENGTH_HIT_DICE)
    def __str__(self):
        return self.className
# This class is dynamic, the level, xp, hp, alignment, and (rarely) size may change
class Character(models.Model):
    """A player character: identity, progression (level/XP/HP), and links
    to its owning user, race, and class, plus the six ability scores."""
    user = models.ForeignKey(User, on_delete=models.CASCADE, default=defaultUser, null=True, blank=True)
    characterID = models.AutoField(primary_key=True)  # Note that Django has a built-in primary key
    characterName = models.CharField(max_length = MAX_LENGTH_CHARACTER_NAME)
    level = models.IntegerField(default=DEFAULT_LEVEL)  # may need to become a list once multiclassing is supported
    xp = models.IntegerField(default=DEFAULT_XP)
    maxHP = models.IntegerField(default=DEFAULT_HP)
    currentHP = models.IntegerField(default=DEFAULT_HP)
    alignment = models.CharField(max_length = MAX_LENGTH_ALIGNMENT)  # Use string or an enum?
    size = models.CharField(max_length = MAX_LENGTH_SIZE)  # Use string or enum?
    # Whether other users may view this character.
    public = models.BooleanField(default=True)
    # Race and class rows are shared lookup data, so PROTECT keeps them
    # from being deleted along with a character.
    # Note from the original author: when rebuilding the database from
    # scratch, add only one foreign key per migration (User, race, and
    # characterClass were migrated one at a time).
    race = models.ForeignKey(CharacterRace, on_delete=models.PROTECT, default=defaultRace, null=True, blank=True)
    characterClass = models.ForeignKey(CharacterClass, on_delete=models.PROTECT, default=defaultClass, null=True, blank=True)
    # Outdated variables
    #raceID = models.IntegerField()
    #classID = models.IntegerField()
    #abilityScoreSetID = models.AutoField(primary_key=True)
    # The six core ability scores (default 10).
    strength = models.IntegerField(default=DEFAULT_ABILITY_SCORE)
    dexterity = models.IntegerField(default=DEFAULT_ABILITY_SCORE)
    constitution = models.IntegerField(default=DEFAULT_ABILITY_SCORE)
    intelligence = models.IntegerField(default=DEFAULT_ABILITY_SCORE)
    wisdom = models.IntegerField(default=DEFAULT_ABILITY_SCORE)
    charisma = models.IntegerField(default=DEFAULT_ABILITY_SCORE)

    def __str__(self):
        """Return the character's name (like Java's toString())."""
        return self.characterName

    def save_model(self, request, obj, form, change):
        # NOTE(review): save_model is a ModelAdmin hook, not a Model
        # method — Django never calls this on a model instance; confirm
        # where (if anywhere) this is invoked from.
        # Bug fix: the original compared obj.user against the defaultUser
        # *function object* (always False for a User); compare against
        # the user it returns instead.
        if obj.user == defaultUser():
            # Only set user during the first save.
            obj.user = request.user
        #super().save_model(request, obj, form, change)

    def get_absolute_url(self):
        """Redirect target shown after creating/updating a character."""
        return reverse('character-detail', kwargs={'pk': self.pk})
# This class is static, like a lookup table
class AbilityScore(models.Model):
    """Static lookup row naming a single ability score (e.g. 'Strength')."""
    abilityName = models.CharField(max_length = MAX_LENGTH_ABILITY_NAME)
# This class is dynamic, the abilityScoreValues may change
# Now outdated, refactored so that we don't have to access another form
# from within a form (there were 2 forms on a page, and you had to access it again)
class AbilityScoreSet(models.Model):
    """Outdated: a per-character bundle of the six ability scores.

    Superseded by the score columns on Character itself (refactored so a
    second form wasn't needed on the character pages); kept for existing
    data/migrations.
    """
    abilityScoreSetID = models.AutoField(primary_key=True)
    character = models.ForeignKey(Character, on_delete=models.CASCADE)#, default=defaultCharacter)
    # One set has many ability scores.
    # However, each ability score may go to multiple sets (like an enumeration)
    # Thus a manyToMany relationship is used
    # Note: only one of the two classes should have a manyToMany Field
    # abilityScores = models.ManyToManyField(AbilityScore)
    # abilityScoreValue = models.IntegerField()
    strength = models.IntegerField(default=DEFAULT_ABILITY_SCORE)
    dexterity = models.IntegerField(default=DEFAULT_ABILITY_SCORE)
    constitution = models.IntegerField(default=DEFAULT_ABILITY_SCORE)
    intelligence = models.IntegerField(default=DEFAULT_ABILITY_SCORE)
    wisdom = models.IntegerField(default=DEFAULT_ABILITY_SCORE)
    charisma = models.IntegerField(default=DEFAULT_ABILITY_SCORE)
    # NOTE(review): save_model is a ModelAdmin hook; django.db.models.Model
    # has no save_model, so the super() call below would raise
    # AttributeError if this were ever invoked — confirm it is dead code.
    def save_model(self, request, obj, form, change):
        # Updates the character to be the one it's associated with
        # if obj.character = defaultCharacter :
        super().save_model(request, obj, form, change)
|
{"/Campaign_Manager/tests.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py"], "/Users/admin.py": ["/Users/models.py"], "/Character_Builder/management/commands/populate_class_db.py": ["/Character_Builder/models.py"], "/Inventory/migrations/0001_initial.py": ["/Inventory/models.py"], "/Inventory/tests.py": ["/Inventory/models.py", "/Character_Builder/models.py"], "/Campaign_Manager/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Campaign_Manager/forms.py", "/Users/models.py", "/Campaign_Manager/urls.py"], "/Campaign_Manager/migrations/0001_initial.py": ["/Campaign_Manager/models.py"], "/Users/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Users/models.py"], "/Campaign_Manager/forms.py": ["/Campaign_Manager/models.py"], "/Character_Builder/management/commands/populate_race_db.py": ["/Character_Builder/models.py"], "/Inventory/urls.py": ["/Inventory/views.py"], "/Character_Builder/migrations/0001_initial.py": ["/Character_Builder/models.py"], "/Campaign_Manager/urls.py": ["/Campaign_Manager/views.py"], "/Character_Builder/views.py": ["/Character_Builder/models.py", "/Character_Builder/forms.py"], "/Campaign_Manager/models.py": ["/Character_Builder/models.py"], "/Character_Builder/forms.py": ["/Character_Builder/models.py"], "/Inventory/models.py": ["/Character_Builder/models.py"], "/Character_Builder/tests.py": ["/Character_Builder/models.py"], "/Users/tests.py": ["/Users/models.py"], "/Inventory/views.py": ["/Inventory/models.py"], "/Character_Builder/urls.py": ["/Character_Builder/views.py"], "/Character_Builder/migrations/0003_character_characterclass.py": ["/Character_Builder/models.py"]}
|
20,216
|
NickJacksonDev/DnD-Manager
|
refs/heads/master
|
/Campaign_Manager/tests.py
|
from django.test import TestCase
from .models import *
from Character_Builder.models import *
# Campaign test cases
class CampaignCreateTestCase(TestCase):
    """Verifies a Campaign can be created and retrieved by name."""
    def setUp(self):
        Campaign.objects.create(campaignName="The Mountain")

    def test_campaign_name(self):
        # Bug fix: a bare `except:` also swallows SystemExit and
        # KeyboardInterrupt; catch Exception so only real test failures
        # (DoesNotExist etc.) trigger self.fail().
        try:
            Campaign.objects.get(campaignName="The Mountain")
        except Exception:
            self.fail()

    def test_campaign_id(self):
        campaign = Campaign.objects.get(campaignName="The Mountain")
        self.assertEqual(campaign.campaignID, 1)
# CampaignDM test cases
class CampaignDMCreateTestCase(TestCase):
    """Verifies CampaignDM creation and its link back to a Campaign."""
    def setUp(self):
        Campaign.objects.create(campaignName="The Mountain")
        camp = Campaign.objects.get(campaignName="The Mountain")
        CampaignDM.objects.create(campaign=camp)

    def test_campaign_dm_id(self):
        camp = Campaign.objects.get(campaignName="The Mountain")
        dm = CampaignDM.objects.get(campaign=camp)
        self.assertEqual(dm.campaignDMID, 1)

    def test_campaign_dm_campaign(self):
        camp = Campaign.objects.get(campaignName="The Mountain")
        # Bug fix: narrowed bare `except:` to `except Exception:` and
        # dropped the unused `dm` binding.
        try:
            CampaignDM.objects.get(campaign=camp)
        except Exception:
            self.fail()
class PartyCreateTestCase(TestCase):
    """Verifies Party creation and its link back to a Campaign."""
    def setUp(self):
        Campaign.objects.create(campaignName="The Mountain")
        camp = Campaign.objects.get(campaignName="The Mountain")
        Party.objects.create(campaign=camp)

    def test_party_id(self):
        camp = Campaign.objects.get(campaignName="The Mountain")
        party = Party.objects.get(campaign=camp)
        self.assertEqual(party.partyID, 1)

    def test_party_campaign(self):
        camp = Campaign.objects.get(campaignName="The Mountain")
        # Bug fix: narrowed bare `except:` to `except Exception:` so
        # SystemExit/KeyboardInterrupt still propagate.
        try:
            Party.objects.get(campaign=camp)
        except Exception:
            self.fail()
|
{"/Campaign_Manager/tests.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py"], "/Users/admin.py": ["/Users/models.py"], "/Character_Builder/management/commands/populate_class_db.py": ["/Character_Builder/models.py"], "/Inventory/migrations/0001_initial.py": ["/Inventory/models.py"], "/Inventory/tests.py": ["/Inventory/models.py", "/Character_Builder/models.py"], "/Campaign_Manager/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Campaign_Manager/forms.py", "/Users/models.py", "/Campaign_Manager/urls.py"], "/Campaign_Manager/migrations/0001_initial.py": ["/Campaign_Manager/models.py"], "/Users/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Users/models.py"], "/Campaign_Manager/forms.py": ["/Campaign_Manager/models.py"], "/Character_Builder/management/commands/populate_race_db.py": ["/Character_Builder/models.py"], "/Inventory/urls.py": ["/Inventory/views.py"], "/Character_Builder/migrations/0001_initial.py": ["/Character_Builder/models.py"], "/Campaign_Manager/urls.py": ["/Campaign_Manager/views.py"], "/Character_Builder/views.py": ["/Character_Builder/models.py", "/Character_Builder/forms.py"], "/Campaign_Manager/models.py": ["/Character_Builder/models.py"], "/Character_Builder/forms.py": ["/Character_Builder/models.py"], "/Inventory/models.py": ["/Character_Builder/models.py"], "/Character_Builder/tests.py": ["/Character_Builder/models.py"], "/Users/tests.py": ["/Users/models.py"], "/Inventory/views.py": ["/Inventory/models.py"], "/Character_Builder/urls.py": ["/Character_Builder/views.py"], "/Character_Builder/migrations/0003_character_characterclass.py": ["/Character_Builder/models.py"]}
|
20,217
|
NickJacksonDev/DnD-Manager
|
refs/heads/master
|
/Users/admin.py
|
from django.contrib import admin
from .models import Profile
from .models import Friend
# Make the user-profile and friendship models manageable through the
# Django admin interface.
admin.site.register(Profile)
admin.site.register(Friend)
|
{"/Campaign_Manager/tests.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py"], "/Users/admin.py": ["/Users/models.py"], "/Character_Builder/management/commands/populate_class_db.py": ["/Character_Builder/models.py"], "/Inventory/migrations/0001_initial.py": ["/Inventory/models.py"], "/Inventory/tests.py": ["/Inventory/models.py", "/Character_Builder/models.py"], "/Campaign_Manager/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Campaign_Manager/forms.py", "/Users/models.py", "/Campaign_Manager/urls.py"], "/Campaign_Manager/migrations/0001_initial.py": ["/Campaign_Manager/models.py"], "/Users/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Users/models.py"], "/Campaign_Manager/forms.py": ["/Campaign_Manager/models.py"], "/Character_Builder/management/commands/populate_race_db.py": ["/Character_Builder/models.py"], "/Inventory/urls.py": ["/Inventory/views.py"], "/Character_Builder/migrations/0001_initial.py": ["/Character_Builder/models.py"], "/Campaign_Manager/urls.py": ["/Campaign_Manager/views.py"], "/Character_Builder/views.py": ["/Character_Builder/models.py", "/Character_Builder/forms.py"], "/Campaign_Manager/models.py": ["/Character_Builder/models.py"], "/Character_Builder/forms.py": ["/Character_Builder/models.py"], "/Inventory/models.py": ["/Character_Builder/models.py"], "/Character_Builder/tests.py": ["/Character_Builder/models.py"], "/Users/tests.py": ["/Users/models.py"], "/Inventory/views.py": ["/Inventory/models.py"], "/Character_Builder/urls.py": ["/Character_Builder/views.py"], "/Character_Builder/migrations/0003_character_characterclass.py": ["/Character_Builder/models.py"]}
|
20,218
|
NickJacksonDev/DnD-Manager
|
refs/heads/master
|
/Character_Builder/management/commands/populate_class_db.py
|
from django.core.management.base import BaseCommand
from Character_Builder.models import (
CharacterClass
)
# Populates the user base with 2 simple default users
class Command(BaseCommand):
    """Management command that seeds the CharacterClass lookup table
    with the standard D&D classes and their hit dice."""
    # args = '<foo bar ...>'
    help = 'Populate the Class data base'

    def _create_classes(self):
        # Fighter is omitted on purpose: it is inserted by the
        # model-level default (defaultClass), so adding it here would
        # create a duplicate row.
        class_specs = [
            ('Barbarian', 'd12'),
            ('Bard', 'd8'),
            ('Cleric', 'd8'),
            ('Druid', 'd8'),
            ('Monk', 'd8'),
            ('Paladin', 'd10'),
            ('Ranger', 'd10'),
            ('Rogue', 'd8'),
            ('Sorcerer', 'd6'),
            ('Warlock', 'd8'),
            ('Wizard', 'd6'),
        ]
        # Save one row per spec, in the same order as the original
        # hand-written create/save pairs.
        for name, dice in class_specs:
            CharacterClass(className=name, hitDice=dice).save()

    def handle(self, *args, **options):
        self._create_classes()
|
{"/Campaign_Manager/tests.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py"], "/Users/admin.py": ["/Users/models.py"], "/Character_Builder/management/commands/populate_class_db.py": ["/Character_Builder/models.py"], "/Inventory/migrations/0001_initial.py": ["/Inventory/models.py"], "/Inventory/tests.py": ["/Inventory/models.py", "/Character_Builder/models.py"], "/Campaign_Manager/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Campaign_Manager/forms.py", "/Users/models.py", "/Campaign_Manager/urls.py"], "/Campaign_Manager/migrations/0001_initial.py": ["/Campaign_Manager/models.py"], "/Users/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Users/models.py"], "/Campaign_Manager/forms.py": ["/Campaign_Manager/models.py"], "/Character_Builder/management/commands/populate_race_db.py": ["/Character_Builder/models.py"], "/Inventory/urls.py": ["/Inventory/views.py"], "/Character_Builder/migrations/0001_initial.py": ["/Character_Builder/models.py"], "/Campaign_Manager/urls.py": ["/Campaign_Manager/views.py"], "/Character_Builder/views.py": ["/Character_Builder/models.py", "/Character_Builder/forms.py"], "/Campaign_Manager/models.py": ["/Character_Builder/models.py"], "/Character_Builder/forms.py": ["/Character_Builder/models.py"], "/Inventory/models.py": ["/Character_Builder/models.py"], "/Character_Builder/tests.py": ["/Character_Builder/models.py"], "/Users/tests.py": ["/Users/models.py"], "/Inventory/views.py": ["/Inventory/models.py"], "/Character_Builder/urls.py": ["/Character_Builder/views.py"], "/Character_Builder/migrations/0003_character_characterclass.py": ["/Character_Builder/models.py"]}
|
20,219
|
NickJacksonDev/DnD-Manager
|
refs/heads/master
|
/Inventory/migrations/0001_initial.py
|
# Generated by Django 2.1.7 on 2019-04-08 02:08
import Inventory.models
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated (Django 2.1.7) initial migration for the Inventory
    app: creates the Inventory and Item tables. Do not edit by hand —
    regenerate with ``makemigrations`` instead."""
    initial = True
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('Character_Builder', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='Inventory',
            fields=[
                ('inventoryID', models.AutoField(primary_key=True, serialize=False)),
                ('character', models.ForeignKey(default=Inventory.models.defaultCharacter, on_delete=django.db.models.deletion.CASCADE, to='Character_Builder.Character')),
            ],
        ),
        migrations.CreateModel(
            name='Item',
            fields=[
                ('itemID', models.AutoField(primary_key=True, serialize=False)),
                ('itemName', models.CharField(max_length=255)),
                ('public', models.BooleanField(default=True)),
                ('inventory', models.ForeignKey(default=Inventory.models.defaultInventory, on_delete=django.db.models.deletion.CASCADE, to='Inventory.Inventory')),
                ('user', models.ForeignKey(blank=True, default=Inventory.models.defaultUser, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
|
{"/Campaign_Manager/tests.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py"], "/Users/admin.py": ["/Users/models.py"], "/Character_Builder/management/commands/populate_class_db.py": ["/Character_Builder/models.py"], "/Inventory/migrations/0001_initial.py": ["/Inventory/models.py"], "/Inventory/tests.py": ["/Inventory/models.py", "/Character_Builder/models.py"], "/Campaign_Manager/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Campaign_Manager/forms.py", "/Users/models.py", "/Campaign_Manager/urls.py"], "/Campaign_Manager/migrations/0001_initial.py": ["/Campaign_Manager/models.py"], "/Users/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Users/models.py"], "/Campaign_Manager/forms.py": ["/Campaign_Manager/models.py"], "/Character_Builder/management/commands/populate_race_db.py": ["/Character_Builder/models.py"], "/Inventory/urls.py": ["/Inventory/views.py"], "/Character_Builder/migrations/0001_initial.py": ["/Character_Builder/models.py"], "/Campaign_Manager/urls.py": ["/Campaign_Manager/views.py"], "/Character_Builder/views.py": ["/Character_Builder/models.py", "/Character_Builder/forms.py"], "/Campaign_Manager/models.py": ["/Character_Builder/models.py"], "/Character_Builder/forms.py": ["/Character_Builder/models.py"], "/Inventory/models.py": ["/Character_Builder/models.py"], "/Character_Builder/tests.py": ["/Character_Builder/models.py"], "/Users/tests.py": ["/Users/models.py"], "/Inventory/views.py": ["/Inventory/models.py"], "/Character_Builder/urls.py": ["/Character_Builder/views.py"], "/Character_Builder/migrations/0003_character_characterclass.py": ["/Character_Builder/models.py"]}
|
20,220
|
NickJacksonDev/DnD-Manager
|
refs/heads/master
|
/Users/models.py
|
from django.db import models
from django.contrib.auth.models import User
from PIL import Image
def defaultUser():
    """Return an existing User, creating a placeholder account if the
    user table is empty."""
    user = User.objects.first()
    if user is not None:
        return user
    return User.objects.create_user('defaultUser', password='djangoproject')
class Profile(models.Model):
    """Per-user profile holding an avatar image, downscaled on save."""
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    image = models.ImageField(default='default.png', upload_to='profile_pics')

    def __str__(self):
        return f'{self.user.username} Profile'

    def save(self, *args, **kwargs):
        # Bug fix: accept and forward positional/keyword arguments
        # (force_insert, using, update_fields, ...) to Model.save; the
        # original signature took **kwargs but never passed them on.
        super().save(*args, **kwargs)
        # Cap avatars at 300x300 in place to limit storage/bandwidth.
        image = Image.open(self.image.path)
        if image.width > 300 or image.height > 300:
            output_size = (300, 300)
            image.thumbnail(output_size)
            image.save(self.image.path)
class FriendsList(models.Model):
    """One-to-one wrapper marking a User as the owner of a friends list."""
    owner = models.OneToOneField(User, on_delete=models.CASCADE)
    def __str__(self):
        return self.owner.username
class Friend(models.Model):
    """Stores the set of users a given user has added as friends."""
    # All users the record's owner has friended.
    users = models.ManyToManyField(User, default=defaultUser)
    # The owner of this friend set; related_name 'owner' enables the
    # reverse lookup from User.
    current_user = models.ForeignKey(User, related_name='owner', null=True, on_delete=models.CASCADE)
    @classmethod
    def make_friend(cls, current_user, new_friend):
        """Add new_friend to current_user's set, creating the row lazily."""
        friend, created = cls.objects.get_or_create(
            current_user = current_user
        )
        friend.users.add(new_friend)
    @classmethod
    def unfriend(cls, current_user, new_friend):
        """Remove new_friend from current_user's set."""
        friend, created = cls.objects.get_or_create(
            current_user = current_user
        )
        friend.users.remove(new_friend)
|
{"/Campaign_Manager/tests.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py"], "/Users/admin.py": ["/Users/models.py"], "/Character_Builder/management/commands/populate_class_db.py": ["/Character_Builder/models.py"], "/Inventory/migrations/0001_initial.py": ["/Inventory/models.py"], "/Inventory/tests.py": ["/Inventory/models.py", "/Character_Builder/models.py"], "/Campaign_Manager/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Campaign_Manager/forms.py", "/Users/models.py", "/Campaign_Manager/urls.py"], "/Campaign_Manager/migrations/0001_initial.py": ["/Campaign_Manager/models.py"], "/Users/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Users/models.py"], "/Campaign_Manager/forms.py": ["/Campaign_Manager/models.py"], "/Character_Builder/management/commands/populate_race_db.py": ["/Character_Builder/models.py"], "/Inventory/urls.py": ["/Inventory/views.py"], "/Character_Builder/migrations/0001_initial.py": ["/Character_Builder/models.py"], "/Campaign_Manager/urls.py": ["/Campaign_Manager/views.py"], "/Character_Builder/views.py": ["/Character_Builder/models.py", "/Character_Builder/forms.py"], "/Campaign_Manager/models.py": ["/Character_Builder/models.py"], "/Character_Builder/forms.py": ["/Character_Builder/models.py"], "/Inventory/models.py": ["/Character_Builder/models.py"], "/Character_Builder/tests.py": ["/Character_Builder/models.py"], "/Users/tests.py": ["/Users/models.py"], "/Inventory/views.py": ["/Inventory/models.py"], "/Character_Builder/urls.py": ["/Character_Builder/views.py"], "/Character_Builder/migrations/0003_character_characterclass.py": ["/Character_Builder/models.py"]}
|
20,221
|
NickJacksonDev/DnD-Manager
|
refs/heads/master
|
/Inventory/tests.py
|
from django.test import TestCase
from .models import *
from Character_Builder.models import *
class ItemCreateTestCase(TestCase):
    """Verifies the Character -> Inventory -> Item creation chain."""
    def setUp(self):
        Character.objects.create(characterName="Malikar", alignment="Lawful Evil", size="Medium")
        char = Character.objects.get(characterName="Malikar")
        Inventory.objects.create(character=char)
        inv = Inventory.objects.get(character=char)
        Item.objects.create(itemName="test item", inventory=inv)

    def test_inventory_id(self):
        char = Character.objects.get(characterName="Malikar")
        inv = Inventory.objects.get(character=char)
        self.assertEqual(inv.inventoryID, 1)

    def test_inventory_character(self):
        char = Character.objects.get(characterName="Malikar")
        # Bug fix: narrowed bare `except:` to `except Exception:` so
        # SystemExit/KeyboardInterrupt still propagate.
        try:
            Inventory.objects.get(character=char)
        except Exception:
            self.fail()

    def test_item_id(self):
        item = Item.objects.get(itemName="test item")
        self.assertEqual(item.itemID, 1)

    def test_item_name(self):
        try:
            Item.objects.get(itemName="test item")
        except Exception:
            self.fail()

    def test_item_inventory(self):
        char = Character.objects.get(characterName="Malikar")
        inv = Inventory.objects.get(character=char)
        item = Item.objects.get(itemName="test item")
        self.assertEqual(item.inventory, inv)

    def test_item_public(self):
        item = Item.objects.get(itemName="test item")
        self.assertEqual(item.public, True)
|
{"/Campaign_Manager/tests.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py"], "/Users/admin.py": ["/Users/models.py"], "/Character_Builder/management/commands/populate_class_db.py": ["/Character_Builder/models.py"], "/Inventory/migrations/0001_initial.py": ["/Inventory/models.py"], "/Inventory/tests.py": ["/Inventory/models.py", "/Character_Builder/models.py"], "/Campaign_Manager/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Campaign_Manager/forms.py", "/Users/models.py", "/Campaign_Manager/urls.py"], "/Campaign_Manager/migrations/0001_initial.py": ["/Campaign_Manager/models.py"], "/Users/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Users/models.py"], "/Campaign_Manager/forms.py": ["/Campaign_Manager/models.py"], "/Character_Builder/management/commands/populate_race_db.py": ["/Character_Builder/models.py"], "/Inventory/urls.py": ["/Inventory/views.py"], "/Character_Builder/migrations/0001_initial.py": ["/Character_Builder/models.py"], "/Campaign_Manager/urls.py": ["/Campaign_Manager/views.py"], "/Character_Builder/views.py": ["/Character_Builder/models.py", "/Character_Builder/forms.py"], "/Campaign_Manager/models.py": ["/Character_Builder/models.py"], "/Character_Builder/forms.py": ["/Character_Builder/models.py"], "/Inventory/models.py": ["/Character_Builder/models.py"], "/Character_Builder/tests.py": ["/Character_Builder/models.py"], "/Users/tests.py": ["/Users/models.py"], "/Inventory/views.py": ["/Inventory/models.py"], "/Character_Builder/urls.py": ["/Character_Builder/views.py"], "/Character_Builder/migrations/0003_character_characterclass.py": ["/Character_Builder/models.py"]}
|
20,222
|
NickJacksonDev/DnD-Manager
|
refs/heads/master
|
/Campaign_Manager/views.py
|
from django.shortcuts import render, redirect
from django.urls import reverse_lazy
from .models import *
from Character_Builder.models import Character
from Campaign_Manager.models import Campaign
from django.http import HttpResponse
from django.http import HttpResponseRedirect
from .forms import CreateCampaignForm, CreatePostForm
from django.views.generic import (
ListView,
DetailView,
CreateView,
UpdateView,
DeleteView
)
from Users.models import *
from .urls import *
def home(request):
    """Campaign hub page: lists campaigns, parties, and characters, and
    handles the 'create campaign' form on POST."""
    form = CreateCampaignForm(request.POST or None)
    if form.is_valid():
        # Stamp the logged-in user as creator before persisting.
        form.instance.creator = request.user
        form.save()
        # NOTE(review): `reverse` is not imported by name in this module —
        # it presumably arrives via the `from .urls import *` wildcard;
        # confirm, or import it from django.urls explicitly.
        return HttpResponseRedirect(reverse('campaign-list'))
    context = {
        'title' : 'Campaigns',
        'campaigns' : Campaign.objects.all(),
        'characters' : Character.objects.all(),
        'partys' : Party.objects.all(),
        'partyCharacters' : PartyCharacter.objects.all(),
        'campaignDMs' : CampaignDM.objects.all(),
        'form' : form,
        'posts': CampaignComment.objects.all(),
    }
    return render(request, 'Campaign_Manager/campaign_builder.html', context)
def overview(request, pk=None):
    """Render a campaign's overview page: party members, the requester's
    friends (including themselves), their characters, posts, and whether
    the requester is a DM of this campaign."""
    campaign = Campaign.objects.get(pk=pk)
    party, created = Party.objects.get_or_create(campaign=campaign)
    members = party.members.all()
    friend, created = Friend.objects.get_or_create(current_user=request.user)
    friends = friend.users.all()
    # Include the requesting user so their own characters are offered too.
    friends |= User.objects.filter(pk=request.user.pk)
    # Union of every friend's characters; stays None only if `friends`
    # is somehow empty.
    friendCharacters = None
    for fr in friends:
        # Bug fix (idiom): `is None` instead of `== None`.
        if friendCharacters is None:
            friendCharacters = Character.objects.filter(user=fr)
        else:
            friendCharacters |= Character.objects.filter(user=fr)
    posts = CampaignComment.objects.filter(campaign=campaign)
    dms = CampaignDM.objects.filter(campaign=campaign)
    # any() short-circuits where the original flag loop scanned all rows.
    userIsDM = any(dm.user == request.user for dm in dms)
    context = {
        'campaign' : campaign,
        #'users' : User.objects.exclude(id=request.user.id),
        'campaigns' : Campaign.objects.all(),
        'characters' : Character.objects.all(),
        'title' : 'Overview',
        'members' : members,
        'friends' : friends,
        'dms' : dms,
        'userIsDM' : userIsDM,
        'posts' : posts,
        'friendCharacters' : friendCharacters,
    }
    return render(request, 'Campaign_Manager/overview.html', context)
def update_party(request, operation, pk, id):
    """Add or remove a character (``pk``) from the party of campaign
    ``id``, then return to that campaign's overview page.
    """
    character = Character.objects.get(pk=pk)
    target_campaign = Campaign.objects.get(pk=id)
    if operation == 'add':
        Party.add_member(target_campaign, character)
    elif operation == 'remove':
        Party.remove_member(target_campaign, character)
    return redirect('overview_with_pk', pk=target_campaign.pk)
def confirmDeletion(request, pk):
    """Show the "are you sure?" page before deleting a campaign."""
    target = Campaign.objects.get(pk=pk)
    return render(
        request,
        'Campaign_Manager/campaign_confirm_deletion.html',
        {'campaign': target},
    )
def deleteCampaign(request, pk):
    """Delete the campaign identified by ``pk`` and go back to the list."""
    Campaign.objects.get(pk=pk).delete()
    return redirect('campaign-list')
class CampaignListView(ListView):
    """List every campaign; templates receive them as ``campaigns``."""

    model = Campaign
    context_object_name = 'campaigns'
class CampaignDetailView(DetailView):
    """Detail page for one campaign: its comment feed plus a flag that
    says whether the requesting user is one of its DMs."""

    model = Campaign

    def get_context_data(self, **kwargs):
        ctx = super().get_context_data(**kwargs)
        current = self.get_object()
        ctx['posts'] = CampaignComment.objects.filter(campaign=current)
        # DM check: does any CampaignDM row for this campaign belong to
        # the requesting user?
        ctx['userIsDM'] = any(
            dm.user == self.request.user
            for dm in CampaignDM.objects.filter(campaign=current)
        )
        return ctx
class CampaignCreateView(CreateView):
    """Create a campaign; the requesting user becomes its creator."""

    model = Campaign
    fields = ['campaignName']

    def form_valid(self, form):
        # Stamp ownership before the base class saves the instance.
        form.instance.creator = self.request.user
        return super().form_valid(form)
class CampaignCommentCreateView(CreateView):
    """Create a comment on the campaign named by the ``pk`` URL kwarg."""

    model = CampaignComment
    form_class = CreatePostForm

    def get_context_data(self, **kwargs):
        ctx = super().get_context_data(**kwargs)
        parent = Campaign.objects.get(pk=self.kwargs.get('pk'))
        # Tell the template whether the requesting user DMs this campaign.
        ctx['userIsDM'] = any(
            dm.user == self.request.user
            for dm in CampaignDM.objects.filter(campaign=parent)
        )
        return ctx

    def form_valid(self, form):
        # Force author/campaign from the request rather than trusting the form.
        comment = form.save(commit=False)
        comment.author = self.request.user
        comment.campaign = Campaign.objects.get(campaignID=self.kwargs['pk'])
        comment.save()
        return super().form_valid(form)

    def get_success_url(self):
        return reverse_lazy('overview_with_pk', kwargs={'pk': self.kwargs['pk']})
class CampaignCommentDetailView(DetailView):
    """Display one campaign comment along with its author."""

    model = CampaignComment

    def get_context_data(self, **kwargs):
        ctx = super().get_context_data(**kwargs)
        comment = self.get_object()
        ctx['post'] = comment
        ctx['author'] = comment.author
        return ctx
class CampaignCommentEditView(UpdateView):
    """Edit an existing campaign comment.

    The campaign is identified by the ``fk`` URL kwarg; on save the author
    is forced to the requesting user and the comment re-attached to that
    campaign.

    FIX: removed a duplicated, unreachable ``return context`` statement
    that followed the real one in ``get_context_data``.
    """

    model = CampaignComment
    form_class = CreatePostForm

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        campaign = Campaign.objects.get(pk=self.kwargs.get('fk'))
        dms = CampaignDM.objects.filter(campaign=campaign)
        # Tell the template whether the requesting user DMs this campaign.
        context['userIsDM'] = False
        for dm in dms:
            if dm.user == self.request.user:
                context['userIsDM'] = True
        return context

    def form_valid(self, form):
        # Force author/campaign from the request rather than trusting the form.
        f = form.save(commit=False)
        f.author = self.request.user
        f.campaign = Campaign.objects.get(campaignID=self.kwargs['fk'])
        f.save()
        return super().form_valid(form)

    def test_func(self):
        # NOTE(review): intended to restrict editing to the comment's
        # author, but UpdateView never calls test_func — it only runs when
        # UserPassesTestMixin is mixed in. Confirm whether the mixin was
        # meant to be added (left unchanged to avoid altering access rules).
        post = self.get_object()
        if self.request.user == post.author:
            return True
        return False

    def get_success_url(self):
        return reverse_lazy('overview_with_pk', kwargs={'pk': self.kwargs['fk']})
class CampaignCommentDeleteView(DeleteView):
    """Delete a campaign comment, then return to the campaign overview."""

    model = CampaignComment

    def get_context_data(self, **kwargs):
        ctx = super().get_context_data(**kwargs)
        comment = self.get_object()
        ctx['post'] = comment
        ctx['author'] = comment.author
        return ctx

    def test_func(self):
        # Only the comment's author may delete it (note: only enforced if
        # UserPassesTestMixin is present on the class).
        return self.request.user == self.get_object().author

    def get_success_url(self):
        return reverse_lazy('overview_with_pk', kwargs={'pk': self.kwargs['fk']})
|
{"/Campaign_Manager/tests.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py"], "/Users/admin.py": ["/Users/models.py"], "/Character_Builder/management/commands/populate_class_db.py": ["/Character_Builder/models.py"], "/Inventory/migrations/0001_initial.py": ["/Inventory/models.py"], "/Inventory/tests.py": ["/Inventory/models.py", "/Character_Builder/models.py"], "/Campaign_Manager/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Campaign_Manager/forms.py", "/Users/models.py", "/Campaign_Manager/urls.py"], "/Campaign_Manager/migrations/0001_initial.py": ["/Campaign_Manager/models.py"], "/Users/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Users/models.py"], "/Campaign_Manager/forms.py": ["/Campaign_Manager/models.py"], "/Character_Builder/management/commands/populate_race_db.py": ["/Character_Builder/models.py"], "/Inventory/urls.py": ["/Inventory/views.py"], "/Character_Builder/migrations/0001_initial.py": ["/Character_Builder/models.py"], "/Campaign_Manager/urls.py": ["/Campaign_Manager/views.py"], "/Character_Builder/views.py": ["/Character_Builder/models.py", "/Character_Builder/forms.py"], "/Campaign_Manager/models.py": ["/Character_Builder/models.py"], "/Character_Builder/forms.py": ["/Character_Builder/models.py"], "/Inventory/models.py": ["/Character_Builder/models.py"], "/Character_Builder/tests.py": ["/Character_Builder/models.py"], "/Users/tests.py": ["/Users/models.py"], "/Inventory/views.py": ["/Inventory/models.py"], "/Character_Builder/urls.py": ["/Character_Builder/views.py"], "/Character_Builder/migrations/0003_character_characterclass.py": ["/Character_Builder/models.py"]}
|
20,223
|
NickJacksonDev/DnD-Manager
|
refs/heads/master
|
/Campaign_Manager/migrations/0001_initial.py
|
# Generated by Django 2.1.7 on 2019-04-08 02:08
import Campaign_Manager.models
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    # Auto-generated initial schema for the Campaign_Manager app:
    # Campaign, CampaignComment, CampaignDM, Party, PartyCharacter.
    # Do not hand-edit operations; generate a follow-up migration instead.

    initial = True

    dependencies = [
        ('Character_Builder', '0001_initial'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Campaign',
            fields=[
                ('campaignID', models.AutoField(primary_key=True, serialize=False)),
                ('campaignName', models.CharField(max_length=255)),
                ('image', models.ImageField(default='default_campaign.jpg', upload_to='campaign_pics')),
                # Falls back to Campaign_Manager.models.defaultUser when no creator is given.
                ('creator', models.ForeignKey(default=Campaign_Manager.models.defaultUser, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='CampaignComment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=100)),
                ('content', models.TextField()),
                ('date', models.DateTimeField(default=django.utils.timezone.now)),
                ('image', models.ImageField(null=True, upload_to='comment_pics')),
                ('slug', models.SlugField(default='default-slug')),
                ('author', models.ForeignKey(default=Campaign_Manager.models.defaultUser, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
                ('campaign', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Campaign_Manager.Campaign')),
            ],
        ),
        migrations.CreateModel(
            name='CampaignDM',
            fields=[
                ('campaignDMID', models.AutoField(primary_key=True, serialize=False)),
                # One DM row per campaign (OneToOne).
                ('campaign', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='Campaign_Manager.Campaign')),
                ('user', models.ForeignKey(default=Campaign_Manager.models.defaultUser, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Party',
            fields=[
                ('partyID', models.AutoField(primary_key=True, serialize=False)),
                ('campaign', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='campaign', to='Campaign_Manager.Campaign')),
                ('members', models.ManyToManyField(to='Character_Builder.Character')),
            ],
        ),
        migrations.CreateModel(
            name='PartyCharacter',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('approved', models.BooleanField(default=False)),
                ('character', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Character_Builder.Character')),
            ],
        ),
    ]
|
{"/Campaign_Manager/tests.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py"], "/Users/admin.py": ["/Users/models.py"], "/Character_Builder/management/commands/populate_class_db.py": ["/Character_Builder/models.py"], "/Inventory/migrations/0001_initial.py": ["/Inventory/models.py"], "/Inventory/tests.py": ["/Inventory/models.py", "/Character_Builder/models.py"], "/Campaign_Manager/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Campaign_Manager/forms.py", "/Users/models.py", "/Campaign_Manager/urls.py"], "/Campaign_Manager/migrations/0001_initial.py": ["/Campaign_Manager/models.py"], "/Users/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Users/models.py"], "/Campaign_Manager/forms.py": ["/Campaign_Manager/models.py"], "/Character_Builder/management/commands/populate_race_db.py": ["/Character_Builder/models.py"], "/Inventory/urls.py": ["/Inventory/views.py"], "/Character_Builder/migrations/0001_initial.py": ["/Character_Builder/models.py"], "/Campaign_Manager/urls.py": ["/Campaign_Manager/views.py"], "/Character_Builder/views.py": ["/Character_Builder/models.py", "/Character_Builder/forms.py"], "/Campaign_Manager/models.py": ["/Character_Builder/models.py"], "/Character_Builder/forms.py": ["/Character_Builder/models.py"], "/Inventory/models.py": ["/Character_Builder/models.py"], "/Character_Builder/tests.py": ["/Character_Builder/models.py"], "/Users/tests.py": ["/Users/models.py"], "/Inventory/views.py": ["/Inventory/models.py"], "/Character_Builder/urls.py": ["/Character_Builder/views.py"], "/Character_Builder/migrations/0003_character_characterclass.py": ["/Character_Builder/models.py"]}
|
20,224
|
NickJacksonDev/DnD-Manager
|
refs/heads/master
|
/Users/views.py
|
from django.shortcuts import render, redirect
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.forms import User
from django.contrib import messages
from .forms import UserRegistrationForm, UserUpdateForm, ProfileUpdateForm
from Campaign_Manager .models import Campaign, Party, PartyCharacter
from Character_Builder.models import Character
from .models import Friend
def register(request):
    """Handle user signup.

    On a valid POST the new user is created and the browser is sent to
    the login page; otherwise the (possibly bound) form is re-rendered.

    FIX: removed an unused ``username`` local and the pointless f-string
    prefix on a message with no placeholders (message text unchanged).
    """
    if request.method == 'POST':
        form = UserRegistrationForm(request.POST)
        if form.is_valid():
            form.save()
            messages.success(request, 'Your account has been created! Please log in')
            return redirect('login')
    else:
        form = UserRegistrationForm()
    return render(request, 'Users/register.html', {'form': form})
def profile(request, pk=None ):
    """User profile page.

    Without ``pk`` shows the requesting user's own profile.  Collects
    every campaign the user is involved in: ones they created, ones a
    character of theirs is approved into (PartyCharacter), and ones where
    a character of theirs is a party member.
    """
    if pk:
        user = User.objects.get(pk=pk)
    else:
        user = request.user
    myCharacters = Character.objects.filter(user=user)
    allCampaigns = Campaign.objects.all()
    myCampaigns = Campaign.objects.filter(creator=user)
    # Union of PartyCharacter rows for all of the user's characters;
    # None until the first iteration seeds the queryset.
    characterCampaigns = None
    for mc in myCharacters:
        if characterCampaigns==None:
            characterCampaigns = PartyCharacter.objects.filter(character=mc)
        else:
            characterCampaigns |= PartyCharacter.objects.filter(character=mc)
    if characterCampaigns != None:
        for cc in characterCampaigns:
            primaryKey=cc.party.campaign.pk
            myCampaigns |= Campaign.objects.filter(pk=primaryKey)
    # Also pick up campaigns where one of the user's characters sits in a
    # party roster (may overlap with the sets above; querysets dedupe).
    for camp in allCampaigns:
        parties = Party.objects.filter(campaign=camp)
        for party in parties:
            for mem in party.members.all():
                for char in myCharacters:
                    if mem == char:
                        campSet = Campaign.objects.filter(pk=camp.pk)
                        myCampaigns |= campSet
    context ={
        'user' : user,
        'users' : User.objects.exclude(id=request.user.id),
        'campaigns' : myCampaigns,
        'characters' : myCharacters,
        'title' : 'Profile',
    }
    return render(request, 'Users/profile.html', context)
def friends(request):
    """Friends list page: all other users plus the viewer's friends."""
    other_users = User.objects.exclude(id=request.user.id)
    # Lazily create the Friend row the first time a user opens this page.
    friend_row, _created = Friend.objects.get_or_create(current_user=request.user)
    return render(request, 'Users/friends.html', {
        'title': 'Friends List',
        'users': other_users,
        'friends': friend_row.users.all(),
    })
def edit_profile(request):
    """Edit the requesting user's account and profile in one page.

    Both forms must validate before anything is saved; on success the
    user is redirected back to their profile.
    """
    if request.method == 'POST':
        user_form = UserUpdateForm(request.POST, instance=request.user)
        profile_form = ProfileUpdateForm(
            request.POST, request.FILES, instance=request.user.profile
        )
        if user_form.is_valid() and profile_form.is_valid():
            user_form.save()
            profile_form.save()
            messages.success(request, f'Your account has been updated!')
            return redirect('profile')
    else:
        user_form = UserUpdateForm(instance=request.user)
        profile_form = ProfileUpdateForm(instance=request.user.profile)
    return render(request, 'Users/edit_profile.html', {
        'u_form': user_form,
        'p_form': profile_form,
    })
def update_friends(request, operation, pk):
    """Add or remove the user identified by ``pk`` as a friend of the
    requesting user, then return to the friends page.
    """
    other = User.objects.get(pk=pk)
    if operation == 'add':
        Friend.make_friend(request.user, other)
    elif operation == 'remove':
        Friend.unfriend(request.user, other)
    return redirect('friends')
|
{"/Campaign_Manager/tests.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py"], "/Users/admin.py": ["/Users/models.py"], "/Character_Builder/management/commands/populate_class_db.py": ["/Character_Builder/models.py"], "/Inventory/migrations/0001_initial.py": ["/Inventory/models.py"], "/Inventory/tests.py": ["/Inventory/models.py", "/Character_Builder/models.py"], "/Campaign_Manager/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Campaign_Manager/forms.py", "/Users/models.py", "/Campaign_Manager/urls.py"], "/Campaign_Manager/migrations/0001_initial.py": ["/Campaign_Manager/models.py"], "/Users/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Users/models.py"], "/Campaign_Manager/forms.py": ["/Campaign_Manager/models.py"], "/Character_Builder/management/commands/populate_race_db.py": ["/Character_Builder/models.py"], "/Inventory/urls.py": ["/Inventory/views.py"], "/Character_Builder/migrations/0001_initial.py": ["/Character_Builder/models.py"], "/Campaign_Manager/urls.py": ["/Campaign_Manager/views.py"], "/Character_Builder/views.py": ["/Character_Builder/models.py", "/Character_Builder/forms.py"], "/Campaign_Manager/models.py": ["/Character_Builder/models.py"], "/Character_Builder/forms.py": ["/Character_Builder/models.py"], "/Inventory/models.py": ["/Character_Builder/models.py"], "/Character_Builder/tests.py": ["/Character_Builder/models.py"], "/Users/tests.py": ["/Users/models.py"], "/Inventory/views.py": ["/Inventory/models.py"], "/Character_Builder/urls.py": ["/Character_Builder/views.py"], "/Character_Builder/migrations/0003_character_characterclass.py": ["/Character_Builder/models.py"]}
|
20,225
|
NickJacksonDev/DnD-Manager
|
refs/heads/master
|
/Campaign_Manager/forms.py
|
from django import forms
from .models import Campaign, CampaignComment
class CreateCampaignForm(forms.ModelForm):
    """Minimal campaign-creation form: the user only supplies a name."""

    class Meta:
        model = Campaign
        fields = ['campaignName']
class CreatePostForm(forms.ModelForm):
    """Campaign-comment form; the image attachment is optional."""

    image = forms.ImageField(required=False)

    class Meta:
        model = CampaignComment
        fields = ['title', 'content', 'image']
|
{"/Campaign_Manager/tests.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py"], "/Users/admin.py": ["/Users/models.py"], "/Character_Builder/management/commands/populate_class_db.py": ["/Character_Builder/models.py"], "/Inventory/migrations/0001_initial.py": ["/Inventory/models.py"], "/Inventory/tests.py": ["/Inventory/models.py", "/Character_Builder/models.py"], "/Campaign_Manager/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Campaign_Manager/forms.py", "/Users/models.py", "/Campaign_Manager/urls.py"], "/Campaign_Manager/migrations/0001_initial.py": ["/Campaign_Manager/models.py"], "/Users/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Users/models.py"], "/Campaign_Manager/forms.py": ["/Campaign_Manager/models.py"], "/Character_Builder/management/commands/populate_race_db.py": ["/Character_Builder/models.py"], "/Inventory/urls.py": ["/Inventory/views.py"], "/Character_Builder/migrations/0001_initial.py": ["/Character_Builder/models.py"], "/Campaign_Manager/urls.py": ["/Campaign_Manager/views.py"], "/Character_Builder/views.py": ["/Character_Builder/models.py", "/Character_Builder/forms.py"], "/Campaign_Manager/models.py": ["/Character_Builder/models.py"], "/Character_Builder/forms.py": ["/Character_Builder/models.py"], "/Inventory/models.py": ["/Character_Builder/models.py"], "/Character_Builder/tests.py": ["/Character_Builder/models.py"], "/Users/tests.py": ["/Users/models.py"], "/Inventory/views.py": ["/Inventory/models.py"], "/Character_Builder/urls.py": ["/Character_Builder/views.py"], "/Character_Builder/migrations/0003_character_characterclass.py": ["/Character_Builder/models.py"]}
|
20,226
|
NickJacksonDev/DnD-Manager
|
refs/heads/master
|
/Character_Builder/management/commands/populate_race_db.py
|
from django.core.management.base import BaseCommand
from Character_Builder.models import (
CharacterRace
)
# Run this with "python manage.py populate_race_db"
# This is an
class Command(BaseCommand):
    """Seed the CharacterRace table with the standard playable races.

    Run with ``python manage.py populate_race_db``.

    FIX: replaced six copy-pasted CharacterRace constructions with one
    data-driven loop; dropped the deprecated ``args`` class attribute and
    the placeholder ``help`` string.
    """

    help = 'Populate the database with the standard character races.'

    # (raceName, speed, size, STR, DEX, CON, INT, WIS, CHA) bonuses.
    # Human is intentionally omitted: models.py creates a default Human
    # race during migration, before this command can run.
    _RACES = [
        ('Dwarf',    25, 'Small',  0, 0, 2, 0, 0, 0),
        ('Elf',      30, 'Medium', 0, 2, 0, 0, 0, 0),
        ('Gnome',    30, 'Medium', 0, 0, 0, 2, 0, 0),
        # NOTE: original comment — not sure the Halfling speed is accurate.
        ('Halfling', 30, 'Medium', 0, 2, 0, 0, 0, 0),
        ('Half-Orc', 30, 'Medium', 2, 0, 1, 0, 0, 0),
        ('Tiefling', 30, 'Medium', 0, 0, 0, 1, 0, 2),
    ]

    def _create_races(self):
        """Insert one CharacterRace row per entry in ``_RACES``."""
        for name, speed, size, s, d, c, i, w, ch in self._RACES:
            CharacterRace(
                raceName=name,
                speed=speed,
                size=size,
                strengthBonus=s,
                dexterityBonus=d,
                constitutionBonus=c,
                intelligenceBonus=i,
                wisdomBonus=w,
                charismaBonus=ch,
            ).save()

    def handle(self, *args, **options):
        self._create_races()
|
{"/Campaign_Manager/tests.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py"], "/Users/admin.py": ["/Users/models.py"], "/Character_Builder/management/commands/populate_class_db.py": ["/Character_Builder/models.py"], "/Inventory/migrations/0001_initial.py": ["/Inventory/models.py"], "/Inventory/tests.py": ["/Inventory/models.py", "/Character_Builder/models.py"], "/Campaign_Manager/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Campaign_Manager/forms.py", "/Users/models.py", "/Campaign_Manager/urls.py"], "/Campaign_Manager/migrations/0001_initial.py": ["/Campaign_Manager/models.py"], "/Users/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Users/models.py"], "/Campaign_Manager/forms.py": ["/Campaign_Manager/models.py"], "/Character_Builder/management/commands/populate_race_db.py": ["/Character_Builder/models.py"], "/Inventory/urls.py": ["/Inventory/views.py"], "/Character_Builder/migrations/0001_initial.py": ["/Character_Builder/models.py"], "/Campaign_Manager/urls.py": ["/Campaign_Manager/views.py"], "/Character_Builder/views.py": ["/Character_Builder/models.py", "/Character_Builder/forms.py"], "/Campaign_Manager/models.py": ["/Character_Builder/models.py"], "/Character_Builder/forms.py": ["/Character_Builder/models.py"], "/Inventory/models.py": ["/Character_Builder/models.py"], "/Character_Builder/tests.py": ["/Character_Builder/models.py"], "/Users/tests.py": ["/Users/models.py"], "/Inventory/views.py": ["/Inventory/models.py"], "/Character_Builder/urls.py": ["/Character_Builder/views.py"], "/Character_Builder/migrations/0003_character_characterclass.py": ["/Character_Builder/models.py"]}
|
20,227
|
NickJacksonDev/DnD-Manager
|
refs/heads/master
|
/Inventory/urls.py
|
from django.urls import path
from .views import (
ItemListView,
ItemDetailView,
ItemCreateView,
ItemEditView,
ItemDeleteView
)
from . import views
# URL routes for the Inventory app: a landing page plus CRUD views for items.
urlpatterns = [
    path('', views.home, name='inventory-home'),
    path('inventory/', ItemListView.as_view(), name='item-list'),
    path('inventory/create/', ItemCreateView.as_view(), name='item-create'),
    path('inventory/<int:pk>/', ItemDetailView.as_view(), name='item-detail'),
    path('inventory/<int:pk>/edit', ItemEditView.as_view(), name='item-edit'),
    path('inventory/<int:pk>/delete', ItemDeleteView.as_view(), name='item-delete'),
]
|
{"/Campaign_Manager/tests.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py"], "/Users/admin.py": ["/Users/models.py"], "/Character_Builder/management/commands/populate_class_db.py": ["/Character_Builder/models.py"], "/Inventory/migrations/0001_initial.py": ["/Inventory/models.py"], "/Inventory/tests.py": ["/Inventory/models.py", "/Character_Builder/models.py"], "/Campaign_Manager/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Campaign_Manager/forms.py", "/Users/models.py", "/Campaign_Manager/urls.py"], "/Campaign_Manager/migrations/0001_initial.py": ["/Campaign_Manager/models.py"], "/Users/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Users/models.py"], "/Campaign_Manager/forms.py": ["/Campaign_Manager/models.py"], "/Character_Builder/management/commands/populate_race_db.py": ["/Character_Builder/models.py"], "/Inventory/urls.py": ["/Inventory/views.py"], "/Character_Builder/migrations/0001_initial.py": ["/Character_Builder/models.py"], "/Campaign_Manager/urls.py": ["/Campaign_Manager/views.py"], "/Character_Builder/views.py": ["/Character_Builder/models.py", "/Character_Builder/forms.py"], "/Campaign_Manager/models.py": ["/Character_Builder/models.py"], "/Character_Builder/forms.py": ["/Character_Builder/models.py"], "/Inventory/models.py": ["/Character_Builder/models.py"], "/Character_Builder/tests.py": ["/Character_Builder/models.py"], "/Users/tests.py": ["/Users/models.py"], "/Inventory/views.py": ["/Inventory/models.py"], "/Character_Builder/urls.py": ["/Character_Builder/views.py"], "/Character_Builder/migrations/0003_character_characterclass.py": ["/Character_Builder/models.py"]}
|
20,228
|
NickJacksonDev/DnD-Manager
|
refs/heads/master
|
/Character_Builder/migrations/0001_initial.py
|
# Generated by Django 2.1.7 on 2019-04-08 02:08
import Character_Builder.models
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated initial schema for the Character_Builder app:
    # AbilityScore, AbilityScoreSet, Character, CharacterClass, CharacterRace.
    # Do not hand-edit operations; generate a follow-up migration instead.

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='AbilityScore',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('abilityName', models.CharField(max_length=255)),
            ],
        ),
        migrations.CreateModel(
            name='AbilityScoreSet',
            fields=[
                ('abilityScoreSetID', models.AutoField(primary_key=True, serialize=False)),
                ('strength', models.IntegerField(default=10)),
                ('dexterity', models.IntegerField(default=10)),
                ('constitution', models.IntegerField(default=10)),
                ('intelligence', models.IntegerField(default=10)),
                ('wisdom', models.IntegerField(default=10)),
                ('charisma', models.IntegerField(default=10)),
            ],
        ),
        migrations.CreateModel(
            name='Character',
            fields=[
                ('characterID', models.AutoField(primary_key=True, serialize=False)),
                ('characterName', models.CharField(max_length=255)),
                ('level', models.IntegerField(default=0)),
                ('xp', models.IntegerField(default=0)),
                ('maxHP', models.IntegerField(default=6)),
                ('currentHP', models.IntegerField(default=6)),
                ('alignment', models.CharField(max_length=255)),
                ('size', models.CharField(max_length=255)),
                ('public', models.BooleanField(default=True)),
                ('strength', models.IntegerField(default=10)),
                ('dexterity', models.IntegerField(default=10)),
                ('constitution', models.IntegerField(default=10)),
                ('intelligence', models.IntegerField(default=10)),
                ('wisdom', models.IntegerField(default=10)),
                ('charisma', models.IntegerField(default=10)),
                # Falls back to Character_Builder.models.defaultUser when no owner is given.
                ('user', models.ForeignKey(blank=True, default=Character_Builder.models.defaultUser, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='CharacterClass',
            fields=[
                ('characterID', models.AutoField(primary_key=True, serialize=False)),
                ('className', models.CharField(max_length=255)),
                ('hitDice', models.CharField(max_length=255)),
            ],
        ),
        migrations.CreateModel(
            name='CharacterRace',
            fields=[
                ('raceID', models.AutoField(primary_key=True, serialize=False)),
                ('raceName', models.CharField(max_length=255)),
                ('speed', models.IntegerField()),
                ('size', models.CharField(max_length=255)),
                ('strengthBonus', models.IntegerField(default=0)),
                ('dexterityBonus', models.IntegerField(default=0)),
                ('constitutionBonus', models.IntegerField(default=0)),
                ('intelligenceBonus', models.IntegerField(default=0)),
                ('wisdomBonus', models.IntegerField(default=0)),
                ('charismaBonus', models.IntegerField(default=0)),
            ],
        ),
        migrations.AddField(
            model_name='abilityscoreset',
            name='character',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Character_Builder.Character'),
        ),
    ]
|
{"/Campaign_Manager/tests.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py"], "/Users/admin.py": ["/Users/models.py"], "/Character_Builder/management/commands/populate_class_db.py": ["/Character_Builder/models.py"], "/Inventory/migrations/0001_initial.py": ["/Inventory/models.py"], "/Inventory/tests.py": ["/Inventory/models.py", "/Character_Builder/models.py"], "/Campaign_Manager/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Campaign_Manager/forms.py", "/Users/models.py", "/Campaign_Manager/urls.py"], "/Campaign_Manager/migrations/0001_initial.py": ["/Campaign_Manager/models.py"], "/Users/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Users/models.py"], "/Campaign_Manager/forms.py": ["/Campaign_Manager/models.py"], "/Character_Builder/management/commands/populate_race_db.py": ["/Character_Builder/models.py"], "/Inventory/urls.py": ["/Inventory/views.py"], "/Character_Builder/migrations/0001_initial.py": ["/Character_Builder/models.py"], "/Campaign_Manager/urls.py": ["/Campaign_Manager/views.py"], "/Character_Builder/views.py": ["/Character_Builder/models.py", "/Character_Builder/forms.py"], "/Campaign_Manager/models.py": ["/Character_Builder/models.py"], "/Character_Builder/forms.py": ["/Character_Builder/models.py"], "/Inventory/models.py": ["/Character_Builder/models.py"], "/Character_Builder/tests.py": ["/Character_Builder/models.py"], "/Users/tests.py": ["/Users/models.py"], "/Inventory/views.py": ["/Inventory/models.py"], "/Character_Builder/urls.py": ["/Character_Builder/views.py"], "/Character_Builder/migrations/0003_character_characterclass.py": ["/Character_Builder/models.py"]}
|
20,229
|
NickJacksonDev/DnD-Manager
|
refs/heads/master
|
/Campaign_Manager/migrations/0002_auto_20190408_1028.py
|
# Generated by Django 2.1.5 on 2019-04-08 14:28
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated follow-up migration: rebuilds Party.members without
    # the original related_name/options (simple ManyToMany to Character).

    dependencies = [
        ('Campaign_Manager', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='party',
            name='members',
            field=models.ManyToManyField(to='Character_Builder.Character'),
        ),
    ]
|
{"/Campaign_Manager/tests.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py"], "/Users/admin.py": ["/Users/models.py"], "/Character_Builder/management/commands/populate_class_db.py": ["/Character_Builder/models.py"], "/Inventory/migrations/0001_initial.py": ["/Inventory/models.py"], "/Inventory/tests.py": ["/Inventory/models.py", "/Character_Builder/models.py"], "/Campaign_Manager/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Campaign_Manager/forms.py", "/Users/models.py", "/Campaign_Manager/urls.py"], "/Campaign_Manager/migrations/0001_initial.py": ["/Campaign_Manager/models.py"], "/Users/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Users/models.py"], "/Campaign_Manager/forms.py": ["/Campaign_Manager/models.py"], "/Character_Builder/management/commands/populate_race_db.py": ["/Character_Builder/models.py"], "/Inventory/urls.py": ["/Inventory/views.py"], "/Character_Builder/migrations/0001_initial.py": ["/Character_Builder/models.py"], "/Campaign_Manager/urls.py": ["/Campaign_Manager/views.py"], "/Character_Builder/views.py": ["/Character_Builder/models.py", "/Character_Builder/forms.py"], "/Campaign_Manager/models.py": ["/Character_Builder/models.py"], "/Character_Builder/forms.py": ["/Character_Builder/models.py"], "/Inventory/models.py": ["/Character_Builder/models.py"], "/Character_Builder/tests.py": ["/Character_Builder/models.py"], "/Users/tests.py": ["/Users/models.py"], "/Inventory/views.py": ["/Inventory/models.py"], "/Character_Builder/urls.py": ["/Character_Builder/views.py"], "/Character_Builder/migrations/0003_character_characterclass.py": ["/Character_Builder/models.py"]}
|
20,230
|
NickJacksonDev/DnD-Manager
|
refs/heads/master
|
/Campaign_Manager/urls.py
|
from django.urls import path, reverse, re_path
from django.utils.text import slugify
from .views import (
CampaignListView,
CampaignDetailView,
CampaignCommentCreateView,
CampaignCommentDetailView,
CampaignCommentEditView,
CampaignCommentDeleteView,
)
from django.urls import path
from . import views
from django.conf.urls import url
# URL routes for the Campaign_Manager app. Campaign routes use ``pk``;
# comment routes identify the campaign as ``fk`` plus the comment's slug.
urlpatterns = [
    path('',
        views.home, name='campaign_builder-home'),
    path('campaigns/',
        CampaignListView.as_view(), name='campaign-list'),
    path('campaigns/<int:pk>/',
        views.overview, name = 'overview_with_pk'),
    path('campaigns/<int:pk>/AddComment/',
        CampaignCommentCreateView.as_view(), name='campaign-comment'),
    path('campaigns/<int:fk>/<slug:slug>/',
        CampaignCommentDetailView.as_view(), name='campaigncomment-detail'),
    path('campaigns/<int:fk>/<slug:slug>/edit',
        CampaignCommentEditView.as_view(), name='campaigncomment-edit'),
    path('campaigns/<int:fk>/<slug:slug>/delete',
        CampaignCommentDeleteView.as_view(), name='campaigncomment-delete'),
    # Party membership: /connect/<add|remove>/<character pk>/<campaign id>/
    re_path(r'^connect/(?P<operation>.+)/(?P<pk>\d+)/(?P<id>\d+)/$', views.update_party, name='update_party'),
    path('campaigns/<int:pk>/delete',
        views.confirmDeletion, name = 'confirm-delete'),
    path('campaigns/<int:pk>/delete/confirmed',
        views.deleteCampaign, name = 'campaign-delete'),
]
|
{"/Campaign_Manager/tests.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py"], "/Users/admin.py": ["/Users/models.py"], "/Character_Builder/management/commands/populate_class_db.py": ["/Character_Builder/models.py"], "/Inventory/migrations/0001_initial.py": ["/Inventory/models.py"], "/Inventory/tests.py": ["/Inventory/models.py", "/Character_Builder/models.py"], "/Campaign_Manager/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Campaign_Manager/forms.py", "/Users/models.py", "/Campaign_Manager/urls.py"], "/Campaign_Manager/migrations/0001_initial.py": ["/Campaign_Manager/models.py"], "/Users/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Users/models.py"], "/Campaign_Manager/forms.py": ["/Campaign_Manager/models.py"], "/Character_Builder/management/commands/populate_race_db.py": ["/Character_Builder/models.py"], "/Inventory/urls.py": ["/Inventory/views.py"], "/Character_Builder/migrations/0001_initial.py": ["/Character_Builder/models.py"], "/Campaign_Manager/urls.py": ["/Campaign_Manager/views.py"], "/Character_Builder/views.py": ["/Character_Builder/models.py", "/Character_Builder/forms.py"], "/Campaign_Manager/models.py": ["/Character_Builder/models.py"], "/Character_Builder/forms.py": ["/Character_Builder/models.py"], "/Inventory/models.py": ["/Character_Builder/models.py"], "/Character_Builder/tests.py": ["/Character_Builder/models.py"], "/Users/tests.py": ["/Users/models.py"], "/Inventory/views.py": ["/Inventory/models.py"], "/Character_Builder/urls.py": ["/Character_Builder/views.py"], "/Character_Builder/migrations/0003_character_characterclass.py": ["/Character_Builder/models.py"]}
|
20,231
|
NickJacksonDev/DnD-Manager
|
refs/heads/master
|
/Character_Builder/views.py
|
from django.shortcuts import render
from django.http import HttpResponse
from django.contrib.auth.mixins import LoginRequiredMixin, UserPassesTestMixin
from django.views.generic import (
ListView,
DetailView,
CreateView,
UpdateView,
DeleteView
)
from django.http import HttpResponseRedirect
from .models import (
Character,
AbilityScoreSet,
CharacterRace,
CharacterClass
)
from .forms import (
CreateCharacterForm,
#EditCharacterForm,
EditAbilityScoresForm
)
def home(request):
    """Render the character-builder home page.

    Binds a CreateCharacterForm to incoming POST data (if any), persists
    it when valid, and lists every stored Character.
    """
    character_form = CreateCharacterForm(request.POST or None)
    if character_form.is_valid():
        character_form.save()
    return render(
        request,
        'Character_Builder/character_builder-home.html',
        {
            'title': 'Home',
            'form': character_form,
            'characters': Character.objects.all(),
        },
    )
# This is a class based view that uses django's built-in
# ListView view to display the characters
# It inherits from ListView
class CharacterListView(ListView):
    """Read-only list of every Character, exposed to templates as 'characters'."""
    model = Character
    # template_name = 'CharacterBuilder/Character_builder-home.html'
    context_object_name = 'characters'
class CharacterDetailView(DetailView):
    """Detail page for a single Character, looked up by pk from the URL."""
    # context_object_name = 'characters'
    def get_context_data(self, **kwargs):
        # Call the base implementation first to get a context
        context = super().get_context_data(**kwargs)
        # Add in the AbilityScore so it can print that as well
        # context['abilityScores'] = AbilityScoreSet.objects.get_object(character = character)
        return context
class CharacterCreateView(LoginRequiredMixin, CreateView):
    """Create a Character; requires login and stamps the current user as owner.

    Django builds the actual edit form from `fields`; the extra
    CreateCharacterForm placed in the context as 'unusedform' is kept only
    for template compatibility and is never the form that gets saved.
    """
    model = Character
    # Make sure this is updated once you change the form!
    fields = [
        'public',
        'characterName',
        'race',
        'characterClass',
        'level',
        'xp',
        'maxHP',
        'currentHP',
        'alignment',
        'size',
        'strength',
        'dexterity',
        'constitution',
        'intelligence',
        'wisdom',
        'charisma',
    ]
    # Redirect target used by LoginRequiredMixin for anonymous visitors.
    login_url = '/login/'

    def get_context_data(self, **kwargs):
        """Expose an extra (unused) CreateCharacterForm to the template."""
        context = super(CharacterCreateView, self).get_context_data(**kwargs)
        # Informational only — the CBV-generated 'form' is what is validated.
        context['unusedform'] = CreateCharacterForm(self.request.POST or None)
        return context

    def form_valid(self, form):
        # Record the submitting user as the character's owner before saving.
        form.instance.user = self.request.user
        return super().form_valid(form)

    def form_invalid(self, form):
        # BUG FIX: Django's ProcessFormView calls form_invalid(form)
        # positionally; the old signature (self, **kwargs) raised TypeError
        # on every invalid submission instead of re-rendering the form.
        return self.render_to_response(self.get_context_data(form=form))
class CharacterEditView(LoginRequiredMixin, UserPassesTestMixin, UpdateView):
    """Edit a Character; restricted to its owner via test_func."""
    model = Character
    fields = [
        'public',
        'characterName',
        'race',
        'characterClass',
        'level',
        'xp',
        'maxHP',
        'currentHP',
        'alignment',
        'size',
        'strength',
        'dexterity',
        'constitution',
        'intelligence',
        'wisdom',
        'charisma',
    ]
    # exclude = []
    login_url = '/login/'

    def form_valid(self, form):
        # BUG FIX: the owner field on Character is `user` (see test_func),
        # not `author` — the old assignment set a stray attribute and never
        # recorded ownership.
        form.instance.user = self.request.user
        return super().form_valid(form)

    def test_func(self):
        """Allow the edit only when the logged-in user owns this character."""
        character = self.get_object()  # renamed: old local shadowed the model class
        return self.request.user == character.user

    def get_context_data(self, **kwargs):
        """Expose an extra (unused) CreateCharacterForm to the template."""
        context = super(CharacterEditView, self).get_context_data(**kwargs)
        # The template relies on the CBV-provided 'form'; this one is informational.
        context['unusedForm1'] = CreateCharacterForm(self.request.POST or None)
        return context

    def form_invalid(self, form):
        # BUG FIX: Django calls form_invalid(form) positionally; the old
        # (self, **kwargs) signature raised TypeError on invalid submissions.
        return self.render_to_response(self.get_context_data(form=form))
class CharacterDeleteView(LoginRequiredMixin, UserPassesTestMixin, DeleteView):
    """Delete a Character; only its owner may confirm the deletion."""
    model = Character
    success_url = '/'
    login_url = '/login/'
    fail_url = '/login/'  # Works?

    def test_func(self):
        # Only the character's owner passes the access check.
        return self.get_object().user == self.request.user
def home_page(request):
    """Render the site-wide landing page."""
    return render(
        request,
        'Character_Builder/home.html',
        {'title': 'Welcome to DnD Manager!'},
    )
# This is a class based view that uses django's built-in
# ListView view to display the races
# It inherits from ListView
class CharacterRaceListView(ListView):
    """Read-only list of every CharacterRace, exposed to templates as 'races'."""
    model = CharacterRace
    # template_name = 'CharacterBuilder/Character_builder-home.html'
    context_object_name = 'races'
# This is a class based view that uses django's built-in
# ListView view to display the classes
# It inherits from ListView
class CharacterClassListView(ListView):
    """Read-only list of every CharacterClass, exposed to templates as 'classes'."""
    model = CharacterClass
    # template_name = 'CharacterBuilder/Character_builder-home.html'
    context_object_name = 'classes'
|
{"/Campaign_Manager/tests.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py"], "/Users/admin.py": ["/Users/models.py"], "/Character_Builder/management/commands/populate_class_db.py": ["/Character_Builder/models.py"], "/Inventory/migrations/0001_initial.py": ["/Inventory/models.py"], "/Inventory/tests.py": ["/Inventory/models.py", "/Character_Builder/models.py"], "/Campaign_Manager/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Campaign_Manager/forms.py", "/Users/models.py", "/Campaign_Manager/urls.py"], "/Campaign_Manager/migrations/0001_initial.py": ["/Campaign_Manager/models.py"], "/Users/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Users/models.py"], "/Campaign_Manager/forms.py": ["/Campaign_Manager/models.py"], "/Character_Builder/management/commands/populate_race_db.py": ["/Character_Builder/models.py"], "/Inventory/urls.py": ["/Inventory/views.py"], "/Character_Builder/migrations/0001_initial.py": ["/Character_Builder/models.py"], "/Campaign_Manager/urls.py": ["/Campaign_Manager/views.py"], "/Character_Builder/views.py": ["/Character_Builder/models.py", "/Character_Builder/forms.py"], "/Campaign_Manager/models.py": ["/Character_Builder/models.py"], "/Character_Builder/forms.py": ["/Character_Builder/models.py"], "/Inventory/models.py": ["/Character_Builder/models.py"], "/Character_Builder/tests.py": ["/Character_Builder/models.py"], "/Users/tests.py": ["/Users/models.py"], "/Inventory/views.py": ["/Inventory/models.py"], "/Character_Builder/urls.py": ["/Character_Builder/views.py"], "/Character_Builder/migrations/0003_character_characterclass.py": ["/Character_Builder/models.py"]}
|
20,232
|
NickJacksonDev/DnD-Manager
|
refs/heads/master
|
/Campaign_Manager/models.py
|
from django.db import models
from Character_Builder.models import Character
from django.contrib.auth.models import User
from django.utils import timezone
from django.utils.text import slugify
from django.urls import reverse
from PIL import Image
# Constants
MAX_LENGTH_CAMPAIGN_NAME = 255
# finds a default user
def defaultUser():
    """Return the first User on record, creating a stand-in account if none exist."""
    existing = User.objects.first()
    if existing is not None:
        return existing
    return User.objects.create_user('defaultUser', password='djangoproject')
# Keeps track of individual campaigns
class Campaign(models.Model):
    """A campaign owned by `creator`, with a banner image capped at 900x600."""
    creator = models.ForeignKey(User, on_delete=models.CASCADE, default=defaultUser)
    campaignID = models.AutoField(primary_key=True)
    campaignName = models.CharField(max_length = MAX_LENGTH_CAMPAIGN_NAME)
    image = models.ImageField(default='default_campaign.jpg', upload_to='campaign_pics')
    #add Description field

    def __str__(self):
        return self.campaignName

    def save(self, *args, **kwargs):
        """Save, downscale an oversized banner image, and register the creator as DM.

        BUG FIX: the old override (`def save(self, **kwargs)` calling bare
        `super().save()`) rejected positional arguments and silently dropped
        keyword arguments such as `using=` / `update_fields=`; both are now
        forwarded to the base implementation.
        """
        super().save(*args, **kwargs)
        image = Image.open(self.image.path)
        if image.width > 900 or image.height > 600:
            output_size = (900, 600)
            image.thumbnail(output_size)
            image.save(self.image.path)
        # The creator is always a DM of their own campaign.
        dm, created = CampaignDM.objects.get_or_create(campaign=self, user=self.creator)

    def get_absolute_url(self):
        return reverse('overview_with_pk', kwargs={'pk': self.pk})
# Keeps track of DMs
class CampaignDM(models.Model):
    """Links a User to the single Campaign they run (one-to-one per campaign)."""
    campaignDMID = models.AutoField(primary_key=True)
    user = models.ForeignKey(User, on_delete=models.CASCADE, default=defaultUser)
    campaign = models.OneToOneField(Campaign, on_delete=models.CASCADE)
    def __str__(self):
        return self.user.username
# used to allow parties to store multiple party members
class PartyCharacter(models.Model):
    """Wraps a Character with an approval flag for party membership."""
    #party = models.ForeignKey(Party, on_delete=models.CASCADE)
    character = models.ForeignKey(Character, on_delete=models.CASCADE)
    approved = models.BooleanField(default = False, editable = True)
    def __str__(self):
        return self.character.characterName
# Keeps track of parties
class Party(models.Model):
    """One party per campaign, holding member Characters through a M2M field."""
    #Keeping this in for now in case the new method doesn't work
    partyID = models.AutoField(primary_key=True)
    #campaign = models.OneToOneField(Campaign, on_delete=models.CASCADE)
    campaign = models.ForeignKey(Campaign, related_name='campaign', on_delete=models.CASCADE)
    members = models.ManyToManyField(Character)
    @classmethod
    def add_member(cls, campaign, new_member):
        """Add `new_member` to the campaign's party, creating the party on demand."""
        party, created = cls.objects.get_or_create(
            campaign = campaign
        )
        party.members.add(new_member)
    @classmethod
    def remove_member(cls, campaign, new_member):
        """Remove `new_member` from the campaign's party (party created if missing)."""
        party, created = cls.objects.get_or_create(
            campaign = campaign
        )
        party.members.remove(new_member)
    def __str__(self):
        return self.campaign.campaignName
class CampaignComment(models.Model):
    """A titled comment on a Campaign, with an optional image and a date-based slug."""
    title = models.CharField(max_length = 100)
    content = models.TextField()
    author = models.ForeignKey(User, on_delete=models.CASCADE, default=defaultUser)
    date = models.DateTimeField(default=timezone.now)
    campaign = models.ForeignKey(Campaign, on_delete=models.CASCADE)
    image = models.ImageField(null=True, upload_to='comment_pics')
    slug = models.SlugField(default=slugify("Default Slug"))

    def save(self, *args, **kwargs):
        """Regenerate the slug, save, then downscale the image if one was uploaded."""
        # NOTE(review): '' joins title and date with no separator; presumably
        # ' ' was intended — left as-is so existing slugs stay stable.
        self.slug = slugify(self.title + '' + str(self.date))
        super(CampaignComment, self).save(*args, **kwargs)
        # BUG FIX: an ImageField with no file is an empty FieldFile, never
        # None, so the old `!= None` test always passed and `.path` raised
        # ValueError for image-less comments. Truthiness is the correct check.
        if self.image:
            image = Image.open(self.image.path)
            if image.width > 500 or image.height > 500:
                output_size = (500, 500)
                image.thumbnail(output_size)
                image.save(self.image.path)

    def __str__(self):
        return self.slug
|
{"/Campaign_Manager/tests.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py"], "/Users/admin.py": ["/Users/models.py"], "/Character_Builder/management/commands/populate_class_db.py": ["/Character_Builder/models.py"], "/Inventory/migrations/0001_initial.py": ["/Inventory/models.py"], "/Inventory/tests.py": ["/Inventory/models.py", "/Character_Builder/models.py"], "/Campaign_Manager/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Campaign_Manager/forms.py", "/Users/models.py", "/Campaign_Manager/urls.py"], "/Campaign_Manager/migrations/0001_initial.py": ["/Campaign_Manager/models.py"], "/Users/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Users/models.py"], "/Campaign_Manager/forms.py": ["/Campaign_Manager/models.py"], "/Character_Builder/management/commands/populate_race_db.py": ["/Character_Builder/models.py"], "/Inventory/urls.py": ["/Inventory/views.py"], "/Character_Builder/migrations/0001_initial.py": ["/Character_Builder/models.py"], "/Campaign_Manager/urls.py": ["/Campaign_Manager/views.py"], "/Character_Builder/views.py": ["/Character_Builder/models.py", "/Character_Builder/forms.py"], "/Campaign_Manager/models.py": ["/Character_Builder/models.py"], "/Character_Builder/forms.py": ["/Character_Builder/models.py"], "/Inventory/models.py": ["/Character_Builder/models.py"], "/Character_Builder/tests.py": ["/Character_Builder/models.py"], "/Users/tests.py": ["/Users/models.py"], "/Inventory/views.py": ["/Inventory/models.py"], "/Character_Builder/urls.py": ["/Character_Builder/views.py"], "/Character_Builder/migrations/0003_character_characterclass.py": ["/Character_Builder/models.py"]}
|
20,233
|
NickJacksonDev/DnD-Manager
|
refs/heads/master
|
/Character_Builder/forms.py
|
from django import forms
from .models import Character, AbilityScoreSet, AbilityScore
class CreateCharacterForm(forms.ModelForm):
    """ModelForm for creating a Character (race and class are not included here)."""
    class Meta:
        model = Character
        fields = [
            'public',
            'characterName',
            'level',
            'xp',
            'maxHP',
            'currentHP',
            'alignment',
            'size',
            'strength',
            'dexterity',
            'constitution',
            'intelligence',
            'wisdom',
            'charisma'
        ]
# Now unused to prevent needing to access another from within a form.
class EditAbilityScoresForm(forms.ModelForm):
    """ModelForm over AbilityScoreSet exposing only the six ability scores."""
    class Meta:
        model = AbilityScoreSet
        fields = [
            # 'character',
            'strength',
            'dexterity',
            'constitution',
            'intelligence',
            'wisdom',
            'charisma'
        ]
|
{"/Campaign_Manager/tests.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py"], "/Users/admin.py": ["/Users/models.py"], "/Character_Builder/management/commands/populate_class_db.py": ["/Character_Builder/models.py"], "/Inventory/migrations/0001_initial.py": ["/Inventory/models.py"], "/Inventory/tests.py": ["/Inventory/models.py", "/Character_Builder/models.py"], "/Campaign_Manager/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Campaign_Manager/forms.py", "/Users/models.py", "/Campaign_Manager/urls.py"], "/Campaign_Manager/migrations/0001_initial.py": ["/Campaign_Manager/models.py"], "/Users/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Users/models.py"], "/Campaign_Manager/forms.py": ["/Campaign_Manager/models.py"], "/Character_Builder/management/commands/populate_race_db.py": ["/Character_Builder/models.py"], "/Inventory/urls.py": ["/Inventory/views.py"], "/Character_Builder/migrations/0001_initial.py": ["/Character_Builder/models.py"], "/Campaign_Manager/urls.py": ["/Campaign_Manager/views.py"], "/Character_Builder/views.py": ["/Character_Builder/models.py", "/Character_Builder/forms.py"], "/Campaign_Manager/models.py": ["/Character_Builder/models.py"], "/Character_Builder/forms.py": ["/Character_Builder/models.py"], "/Inventory/models.py": ["/Character_Builder/models.py"], "/Character_Builder/tests.py": ["/Character_Builder/models.py"], "/Users/tests.py": ["/Users/models.py"], "/Inventory/views.py": ["/Inventory/models.py"], "/Character_Builder/urls.py": ["/Character_Builder/views.py"], "/Character_Builder/migrations/0003_character_characterclass.py": ["/Character_Builder/models.py"]}
|
20,234
|
NickJacksonDev/DnD-Manager
|
refs/heads/master
|
/Inventory/models.py
|
from django.db import models
from Character_Builder.models import Character
from django.contrib.auth.models import User
from django.urls import reverse
# Constants
MAX_LENGTH_ITEM_NAME = 255
# Creates a default character
def defaultCharacter():
    """Return the first Character on record, creating a placeholder if none exist."""
    existing = Character.objects.first()
    if existing is not None:
        return existing
    return Character.objects.create(characterName='Default Character', alignment='Lawful Good', size='Medium')
# finds a default user
def defaultUser():
    """Return the first User on record, creating a stand-in account if none exist."""
    existing = User.objects.first()
    if existing is not None:
        return existing
    return User.objects.create_user('defaultUser', password='djangoproject')
# Keeps track of inventories
class Inventory(models.Model):
    """A character's inventory container; Items reference it via ForeignKey."""
    inventoryID = models.AutoField(primary_key=True)
    character = models.ForeignKey(Character, on_delete=models.CASCADE, default=defaultCharacter)
    def __str__(self):
        return self.character.characterName
# Creates default inventory
def defaultInventory():
    """Return the first Inventory, creating one (owned by a fresh placeholder
    Character) when the table is empty."""
    inventory = Inventory.objects.first()
    if inventory is None:
        owner = Character.objects.create(characterName='Default Character', alignment='Lawful Good', size='Medium')
        inventory = Inventory.objects.create(character=owner)
    return inventory
# Keeps track of individual items
class Item(models.Model):
    """An inventory item, optionally owned by a User, publicly visible by default."""
    user = models.ForeignKey(User, on_delete=models.CASCADE, default=defaultUser, null=True, blank=True)
    itemID = models.AutoField(primary_key=True)
    itemName = models.CharField(max_length = MAX_LENGTH_ITEM_NAME)
    inventory = models.ForeignKey(Inventory, on_delete=models.CASCADE, default=defaultInventory)
    public = models.BooleanField(default=True)

    # Should associate a user with the character when initialized.
    # NOTE(review): save_model is a ModelAdmin hook, not a Model method, so
    # Django never calls this on Item instances — confirm whether this
    # belongs on an admin class instead.
    def save_model(self, request, obj, form, change):
        # BUG FIX: the old code compared against the defaultUser *function*
        # object rather than the User it returns, so the guard never matched.
        if obj.user == defaultUser():
            # Only set user during the first save.
            obj.user = request.user

    # this is where the page goes to after you save
    def get_absolute_url(self):
        return reverse('item-detail', kwargs={'pk': self.pk})

    def __str__(self):
        return self.itemName
|
{"/Campaign_Manager/tests.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py"], "/Users/admin.py": ["/Users/models.py"], "/Character_Builder/management/commands/populate_class_db.py": ["/Character_Builder/models.py"], "/Inventory/migrations/0001_initial.py": ["/Inventory/models.py"], "/Inventory/tests.py": ["/Inventory/models.py", "/Character_Builder/models.py"], "/Campaign_Manager/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Campaign_Manager/forms.py", "/Users/models.py", "/Campaign_Manager/urls.py"], "/Campaign_Manager/migrations/0001_initial.py": ["/Campaign_Manager/models.py"], "/Users/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Users/models.py"], "/Campaign_Manager/forms.py": ["/Campaign_Manager/models.py"], "/Character_Builder/management/commands/populate_race_db.py": ["/Character_Builder/models.py"], "/Inventory/urls.py": ["/Inventory/views.py"], "/Character_Builder/migrations/0001_initial.py": ["/Character_Builder/models.py"], "/Campaign_Manager/urls.py": ["/Campaign_Manager/views.py"], "/Character_Builder/views.py": ["/Character_Builder/models.py", "/Character_Builder/forms.py"], "/Campaign_Manager/models.py": ["/Character_Builder/models.py"], "/Character_Builder/forms.py": ["/Character_Builder/models.py"], "/Inventory/models.py": ["/Character_Builder/models.py"], "/Character_Builder/tests.py": ["/Character_Builder/models.py"], "/Users/tests.py": ["/Users/models.py"], "/Inventory/views.py": ["/Inventory/models.py"], "/Character_Builder/urls.py": ["/Character_Builder/views.py"], "/Character_Builder/migrations/0003_character_characterclass.py": ["/Character_Builder/models.py"]}
|
20,235
|
NickJacksonDev/DnD-Manager
|
refs/heads/master
|
/Character_Builder/tests.py
|
from django.test import TestCase
from .models import *
# Test Constants
# Expected model-field defaults for a freshly created Character.
DEFAULT_LEVEL = 0
DEFAULT_XP = 0
DEFAULT_HP = 6
class CharacterCreateTestCase(TestCase):
    """Field defaults and identity of a freshly created Character."""

    def setUp(self):
        Character.objects.create(characterName="Malikar", alignment="Lawful Evil", size="Medium")

    def _malikar(self):
        # Common lookup shared by the assertions below.
        return Character.objects.get(characterName="Malikar")

    def test_character_id(self):
        self.assertEqual(self._malikar().characterID, 1)

    def test_character_name(self):
        try:
            Character.objects.get(characterName="Malikar")
        except:
            self.fail()

    def test_character_level(self):
        self.assertEqual(self._malikar().level, DEFAULT_LEVEL)

    def test_character_xp(self):
        self.assertEqual(self._malikar().xp, DEFAULT_XP)

    def test_character_max_hp(self):
        self.assertEqual(self._malikar().maxHP, DEFAULT_HP)

    def test_character_current_hp(self):
        self.assertEqual(self._malikar().currentHP, DEFAULT_HP)

    def test_character_alignment(self):
        self.assertEqual(self._malikar().alignment, "Lawful Evil")

    def test_character_size(self):
        self.assertEqual(self._malikar().size, "Medium")
|
{"/Campaign_Manager/tests.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py"], "/Users/admin.py": ["/Users/models.py"], "/Character_Builder/management/commands/populate_class_db.py": ["/Character_Builder/models.py"], "/Inventory/migrations/0001_initial.py": ["/Inventory/models.py"], "/Inventory/tests.py": ["/Inventory/models.py", "/Character_Builder/models.py"], "/Campaign_Manager/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Campaign_Manager/forms.py", "/Users/models.py", "/Campaign_Manager/urls.py"], "/Campaign_Manager/migrations/0001_initial.py": ["/Campaign_Manager/models.py"], "/Users/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Users/models.py"], "/Campaign_Manager/forms.py": ["/Campaign_Manager/models.py"], "/Character_Builder/management/commands/populate_race_db.py": ["/Character_Builder/models.py"], "/Inventory/urls.py": ["/Inventory/views.py"], "/Character_Builder/migrations/0001_initial.py": ["/Character_Builder/models.py"], "/Campaign_Manager/urls.py": ["/Campaign_Manager/views.py"], "/Character_Builder/views.py": ["/Character_Builder/models.py", "/Character_Builder/forms.py"], "/Campaign_Manager/models.py": ["/Character_Builder/models.py"], "/Character_Builder/forms.py": ["/Character_Builder/models.py"], "/Inventory/models.py": ["/Character_Builder/models.py"], "/Character_Builder/tests.py": ["/Character_Builder/models.py"], "/Users/tests.py": ["/Users/models.py"], "/Inventory/views.py": ["/Inventory/models.py"], "/Character_Builder/urls.py": ["/Character_Builder/views.py"], "/Character_Builder/migrations/0003_character_characterclass.py": ["/Character_Builder/models.py"]}
|
20,236
|
NickJacksonDev/DnD-Manager
|
refs/heads/master
|
/Users/tests.py
|
from django.test import TestCase
from django.contrib.auth.models import User
from .models import Profile
class ProfileCreationTestCase(TestCase):
    """Signals should create a Profile automatically for every User."""

    def test_profile_created_upon_user_creation(self):
        User.objects.create_user('TestCaseUser', email='test@email.com', password='testpassword')
        user = User.objects.get(username='TestCaseUser')
        try:
            Profile.objects.get(user = user)
        except Profile.DoesNotExist as e:
            # BUG FIX: TestCase.fail() accepts a single message argument; the
            # old two-argument call raised TypeError instead of failing cleanly.
            self.fail(f'Profile was not created when User was created: {e}')

    def test_each_user_has_a_profile(self):
        for user in User.objects.all():
            try:
                Profile.objects.get(user = user)
            except Profile.DoesNotExist as e:
                self.fail(f'User does not have a Profile: {e}')
|
{"/Campaign_Manager/tests.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py"], "/Users/admin.py": ["/Users/models.py"], "/Character_Builder/management/commands/populate_class_db.py": ["/Character_Builder/models.py"], "/Inventory/migrations/0001_initial.py": ["/Inventory/models.py"], "/Inventory/tests.py": ["/Inventory/models.py", "/Character_Builder/models.py"], "/Campaign_Manager/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Campaign_Manager/forms.py", "/Users/models.py", "/Campaign_Manager/urls.py"], "/Campaign_Manager/migrations/0001_initial.py": ["/Campaign_Manager/models.py"], "/Users/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Users/models.py"], "/Campaign_Manager/forms.py": ["/Campaign_Manager/models.py"], "/Character_Builder/management/commands/populate_race_db.py": ["/Character_Builder/models.py"], "/Inventory/urls.py": ["/Inventory/views.py"], "/Character_Builder/migrations/0001_initial.py": ["/Character_Builder/models.py"], "/Campaign_Manager/urls.py": ["/Campaign_Manager/views.py"], "/Character_Builder/views.py": ["/Character_Builder/models.py", "/Character_Builder/forms.py"], "/Campaign_Manager/models.py": ["/Character_Builder/models.py"], "/Character_Builder/forms.py": ["/Character_Builder/models.py"], "/Inventory/models.py": ["/Character_Builder/models.py"], "/Character_Builder/tests.py": ["/Character_Builder/models.py"], "/Users/tests.py": ["/Users/models.py"], "/Inventory/views.py": ["/Inventory/models.py"], "/Character_Builder/urls.py": ["/Character_Builder/views.py"], "/Character_Builder/migrations/0003_character_characterclass.py": ["/Character_Builder/models.py"]}
|
20,237
|
NickJacksonDev/DnD-Manager
|
refs/heads/master
|
/Inventory/views.py
|
from django.shortcuts import render
from django.http import HttpResponse
from django.contrib.auth.mixins import LoginRequiredMixin, UserPassesTestMixin
from django.views.generic import (
ListView,
DetailView,
CreateView,
UpdateView,
DeleteView
)
from django.http import HttpResponseRedirect
from .models import *
from .forms import CreateItemForm
# Home view
def home(request):
    """Render the inventory home page with an item-creation form and all items."""
    item_form = CreateItemForm(request.POST or None)
    if item_form.is_valid():
        item_form.save()
    return render(
        request,
        'Inventory/item-home.html',
        {
            'title': 'Inventory',
            'items': Item.objects.all(),
            'form': item_form,
        },
    )
# This is a class based view that uses django's built-in
# ListView view to display the inventorys
# It inherits from ListView
class ItemListView(ListView):
    """Read-only list of every Item, exposed to templates as 'items'."""
    model = Item
    # template_name = 'InventoryBuilder/Inventory_builder-home.html'
    context_object_name = 'items'
class ItemDetailView(DetailView):
    """Detail page for a single Item, looked up by pk from the URL."""
    # context_object_name = 'inventorys'
    #context_object_name = 'items'
class ItemCreateView(LoginRequiredMixin, CreateView):
    """Create an Item; requires login and stamps the current user as owner."""
    model = Item
    fields = ['itemName']
    login_url = '/login/'

    def form_valid(self, form):
        # Record the submitting user as the item's owner before saving.
        form.instance.user = self.request.user
        return super().form_valid(form)
class ItemEditView(LoginRequiredMixin, UserPassesTestMixin, UpdateView):
    """Edit an Item; restricted to its owner via test_func."""
    model = Item
    fields = ['itemName']
    # exclude = []
    login_url = '/login/'

    def form_valid(self, form):
        # BUG FIX: Item's owner field is `user` (see test_func), not `author`;
        # the old assignment set a stray attribute and never updated ownership.
        form.instance.user = self.request.user
        return super().form_valid(form)

    # Tests to ensure the logged-in user is the owner of that inventory item.
    def test_func(self):
        item = self.get_object()  # renamed: the old local shadowed the Item class
        return self.request.user == item.user
class ItemDeleteView(LoginRequiredMixin, UserPassesTestMixin, DeleteView):
    """Delete an Item; only its owner may confirm the deletion."""
    model = Item
    success_url = '/inventory/inventory'
    login_url = '/login/'
    fail_url = '/login/'  # Works?

    def test_func(self):
        # Only the item's owner passes the access check.
        return self.get_object().user == self.request.user
|
{"/Campaign_Manager/tests.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py"], "/Users/admin.py": ["/Users/models.py"], "/Character_Builder/management/commands/populate_class_db.py": ["/Character_Builder/models.py"], "/Inventory/migrations/0001_initial.py": ["/Inventory/models.py"], "/Inventory/tests.py": ["/Inventory/models.py", "/Character_Builder/models.py"], "/Campaign_Manager/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Campaign_Manager/forms.py", "/Users/models.py", "/Campaign_Manager/urls.py"], "/Campaign_Manager/migrations/0001_initial.py": ["/Campaign_Manager/models.py"], "/Users/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Users/models.py"], "/Campaign_Manager/forms.py": ["/Campaign_Manager/models.py"], "/Character_Builder/management/commands/populate_race_db.py": ["/Character_Builder/models.py"], "/Inventory/urls.py": ["/Inventory/views.py"], "/Character_Builder/migrations/0001_initial.py": ["/Character_Builder/models.py"], "/Campaign_Manager/urls.py": ["/Campaign_Manager/views.py"], "/Character_Builder/views.py": ["/Character_Builder/models.py", "/Character_Builder/forms.py"], "/Campaign_Manager/models.py": ["/Character_Builder/models.py"], "/Character_Builder/forms.py": ["/Character_Builder/models.py"], "/Inventory/models.py": ["/Character_Builder/models.py"], "/Character_Builder/tests.py": ["/Character_Builder/models.py"], "/Users/tests.py": ["/Users/models.py"], "/Inventory/views.py": ["/Inventory/models.py"], "/Character_Builder/urls.py": ["/Character_Builder/views.py"], "/Character_Builder/migrations/0003_character_characterclass.py": ["/Character_Builder/models.py"]}
|
20,238
|
NickJacksonDev/DnD-Manager
|
refs/heads/master
|
/Character_Builder/management/commands/populate_user_db.py
|
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
# Populates the user base with 2 simple default users
class Command(BaseCommand):
    """Management command seeding two placeholder user accounts."""
    # args = '<foo bar ...>'
    help = 'Populate the user base with two simple default users'

    def _create_users(self):
        # Same two accounts the original created one by one, built in a loop.
        for username in ('defaultuser1', 'defaultuser2'):
            account = User.objects.create_user(
                username,
                password='djangoproject'
            )
            account.save()

    def handle(self, *args, **options):
        self._create_users()
|
{"/Campaign_Manager/tests.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py"], "/Users/admin.py": ["/Users/models.py"], "/Character_Builder/management/commands/populate_class_db.py": ["/Character_Builder/models.py"], "/Inventory/migrations/0001_initial.py": ["/Inventory/models.py"], "/Inventory/tests.py": ["/Inventory/models.py", "/Character_Builder/models.py"], "/Campaign_Manager/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Campaign_Manager/forms.py", "/Users/models.py", "/Campaign_Manager/urls.py"], "/Campaign_Manager/migrations/0001_initial.py": ["/Campaign_Manager/models.py"], "/Users/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Users/models.py"], "/Campaign_Manager/forms.py": ["/Campaign_Manager/models.py"], "/Character_Builder/management/commands/populate_race_db.py": ["/Character_Builder/models.py"], "/Inventory/urls.py": ["/Inventory/views.py"], "/Character_Builder/migrations/0001_initial.py": ["/Character_Builder/models.py"], "/Campaign_Manager/urls.py": ["/Campaign_Manager/views.py"], "/Character_Builder/views.py": ["/Character_Builder/models.py", "/Character_Builder/forms.py"], "/Campaign_Manager/models.py": ["/Character_Builder/models.py"], "/Character_Builder/forms.py": ["/Character_Builder/models.py"], "/Inventory/models.py": ["/Character_Builder/models.py"], "/Character_Builder/tests.py": ["/Character_Builder/models.py"], "/Users/tests.py": ["/Users/models.py"], "/Inventory/views.py": ["/Inventory/models.py"], "/Character_Builder/urls.py": ["/Character_Builder/views.py"], "/Character_Builder/migrations/0003_character_characterclass.py": ["/Character_Builder/models.py"]}
|
20,239
|
NickJacksonDev/DnD-Manager
|
refs/heads/master
|
/Character_Builder/urls.py
|
from django.urls import path
from .views import (
CharacterListView,
CharacterDetailView,
CharacterCreateView,
CharacterEditView,
CharacterDeleteView,
CharacterRaceListView,
CharacterClassListView,
)
from . import views
# URL routes for the Character_Builder app: home page, character CRUD,
# and read-only race/class reference lists.
urlpatterns = [
    path('',
        views.home, name='character_builder-home'),
    # path('', PostListView.as_view(), name='character_builder-home'),
    path('characters/',
        CharacterListView.as_view(), name='character-list'),
    path('characters/create/',
        CharacterCreateView.as_view(), name='character-create'),
    path('characters/<int:pk>/',
        CharacterDetailView.as_view(), name='character-detail'),
    path('characters/<int:pk>/edit',
        CharacterEditView.as_view(), name='character-edit'),
    path('characters/<int:pk>/delete',
        CharacterDeleteView.as_view(), name='character-delete'),
    # Reference lists of the available races and classes.
    path('races/',
        CharacterRaceListView.as_view(), name='characterRace-list'),
    path('classes/',
        CharacterClassListView.as_view(), name='characterClass-list'),
]
|
{"/Campaign_Manager/tests.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py"], "/Users/admin.py": ["/Users/models.py"], "/Character_Builder/management/commands/populate_class_db.py": ["/Character_Builder/models.py"], "/Inventory/migrations/0001_initial.py": ["/Inventory/models.py"], "/Inventory/tests.py": ["/Inventory/models.py", "/Character_Builder/models.py"], "/Campaign_Manager/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Campaign_Manager/forms.py", "/Users/models.py", "/Campaign_Manager/urls.py"], "/Campaign_Manager/migrations/0001_initial.py": ["/Campaign_Manager/models.py"], "/Users/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Users/models.py"], "/Campaign_Manager/forms.py": ["/Campaign_Manager/models.py"], "/Character_Builder/management/commands/populate_race_db.py": ["/Character_Builder/models.py"], "/Inventory/urls.py": ["/Inventory/views.py"], "/Character_Builder/migrations/0001_initial.py": ["/Character_Builder/models.py"], "/Campaign_Manager/urls.py": ["/Campaign_Manager/views.py"], "/Character_Builder/views.py": ["/Character_Builder/models.py", "/Character_Builder/forms.py"], "/Campaign_Manager/models.py": ["/Character_Builder/models.py"], "/Character_Builder/forms.py": ["/Character_Builder/models.py"], "/Inventory/models.py": ["/Character_Builder/models.py"], "/Character_Builder/tests.py": ["/Character_Builder/models.py"], "/Users/tests.py": ["/Users/models.py"], "/Inventory/views.py": ["/Inventory/models.py"], "/Character_Builder/urls.py": ["/Character_Builder/views.py"], "/Character_Builder/migrations/0003_character_characterclass.py": ["/Character_Builder/models.py"]}
|
20,240
|
NickJacksonDev/DnD-Manager
|
refs/heads/master
|
/Character_Builder/apps.py
|
from django.apps import AppConfig
class CharacterBuilderConfig(AppConfig):
    """Django application configuration for the Character_Builder app."""
    name = 'Character_Builder'
|
{"/Campaign_Manager/tests.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py"], "/Users/admin.py": ["/Users/models.py"], "/Character_Builder/management/commands/populate_class_db.py": ["/Character_Builder/models.py"], "/Inventory/migrations/0001_initial.py": ["/Inventory/models.py"], "/Inventory/tests.py": ["/Inventory/models.py", "/Character_Builder/models.py"], "/Campaign_Manager/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Campaign_Manager/forms.py", "/Users/models.py", "/Campaign_Manager/urls.py"], "/Campaign_Manager/migrations/0001_initial.py": ["/Campaign_Manager/models.py"], "/Users/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Users/models.py"], "/Campaign_Manager/forms.py": ["/Campaign_Manager/models.py"], "/Character_Builder/management/commands/populate_race_db.py": ["/Character_Builder/models.py"], "/Inventory/urls.py": ["/Inventory/views.py"], "/Character_Builder/migrations/0001_initial.py": ["/Character_Builder/models.py"], "/Campaign_Manager/urls.py": ["/Campaign_Manager/views.py"], "/Character_Builder/views.py": ["/Character_Builder/models.py", "/Character_Builder/forms.py"], "/Campaign_Manager/models.py": ["/Character_Builder/models.py"], "/Character_Builder/forms.py": ["/Character_Builder/models.py"], "/Inventory/models.py": ["/Character_Builder/models.py"], "/Character_Builder/tests.py": ["/Character_Builder/models.py"], "/Users/tests.py": ["/Users/models.py"], "/Inventory/views.py": ["/Inventory/models.py"], "/Character_Builder/urls.py": ["/Character_Builder/views.py"], "/Character_Builder/migrations/0003_character_characterclass.py": ["/Character_Builder/models.py"]}
|
20,241
|
NickJacksonDev/DnD-Manager
|
refs/heads/master
|
/Character_Builder/migrations/0003_character_characterclass.py
|
# Generated by Django 2.1.7 on 2019-04-08 02:09
import Character_Builder.models
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration: adds the ``characterClass`` FK to Character."""

    dependencies = [
        ('Character_Builder', '0002_character_race'),
    ]

    operations = [
        migrations.AddField(
            model_name='character',
            name='characterClass',
            # PROTECT blocks deleting a CharacterClass still referenced by
            # characters; defaultClass supplies the value for existing rows.
            field=models.ForeignKey(blank=True, default=Character_Builder.models.defaultClass, null=True, on_delete=django.db.models.deletion.PROTECT, to='Character_Builder.CharacterClass'),
        ),
    ]
|
{"/Campaign_Manager/tests.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py"], "/Users/admin.py": ["/Users/models.py"], "/Character_Builder/management/commands/populate_class_db.py": ["/Character_Builder/models.py"], "/Inventory/migrations/0001_initial.py": ["/Inventory/models.py"], "/Inventory/tests.py": ["/Inventory/models.py", "/Character_Builder/models.py"], "/Campaign_Manager/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Campaign_Manager/forms.py", "/Users/models.py", "/Campaign_Manager/urls.py"], "/Campaign_Manager/migrations/0001_initial.py": ["/Campaign_Manager/models.py"], "/Users/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Users/models.py"], "/Campaign_Manager/forms.py": ["/Campaign_Manager/models.py"], "/Character_Builder/management/commands/populate_race_db.py": ["/Character_Builder/models.py"], "/Inventory/urls.py": ["/Inventory/views.py"], "/Character_Builder/migrations/0001_initial.py": ["/Character_Builder/models.py"], "/Campaign_Manager/urls.py": ["/Campaign_Manager/views.py"], "/Character_Builder/views.py": ["/Character_Builder/models.py", "/Character_Builder/forms.py"], "/Campaign_Manager/models.py": ["/Character_Builder/models.py"], "/Character_Builder/forms.py": ["/Character_Builder/models.py"], "/Inventory/models.py": ["/Character_Builder/models.py"], "/Character_Builder/tests.py": ["/Character_Builder/models.py"], "/Users/tests.py": ["/Users/models.py"], "/Inventory/views.py": ["/Inventory/models.py"], "/Character_Builder/urls.py": ["/Character_Builder/views.py"], "/Character_Builder/migrations/0003_character_characterclass.py": ["/Character_Builder/models.py"]}
|
20,242
|
NickJacksonDev/DnD-Manager
|
refs/heads/master
|
/Character_Builder/createRaces.py
|
import os

# NOTE(review): this only opens an interactive Django shell; it does not
# populate any race data (see management/commands/populate_race_db.py).
os.system("python manage.py shell")
# Unneeded attempt. lol
|
{"/Campaign_Manager/tests.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py"], "/Users/admin.py": ["/Users/models.py"], "/Character_Builder/management/commands/populate_class_db.py": ["/Character_Builder/models.py"], "/Inventory/migrations/0001_initial.py": ["/Inventory/models.py"], "/Inventory/tests.py": ["/Inventory/models.py", "/Character_Builder/models.py"], "/Campaign_Manager/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Campaign_Manager/forms.py", "/Users/models.py", "/Campaign_Manager/urls.py"], "/Campaign_Manager/migrations/0001_initial.py": ["/Campaign_Manager/models.py"], "/Users/views.py": ["/Campaign_Manager/models.py", "/Character_Builder/models.py", "/Users/models.py"], "/Campaign_Manager/forms.py": ["/Campaign_Manager/models.py"], "/Character_Builder/management/commands/populate_race_db.py": ["/Character_Builder/models.py"], "/Inventory/urls.py": ["/Inventory/views.py"], "/Character_Builder/migrations/0001_initial.py": ["/Character_Builder/models.py"], "/Campaign_Manager/urls.py": ["/Campaign_Manager/views.py"], "/Character_Builder/views.py": ["/Character_Builder/models.py", "/Character_Builder/forms.py"], "/Campaign_Manager/models.py": ["/Character_Builder/models.py"], "/Character_Builder/forms.py": ["/Character_Builder/models.py"], "/Inventory/models.py": ["/Character_Builder/models.py"], "/Character_Builder/tests.py": ["/Character_Builder/models.py"], "/Users/tests.py": ["/Users/models.py"], "/Inventory/views.py": ["/Inventory/models.py"], "/Character_Builder/urls.py": ["/Character_Builder/views.py"], "/Character_Builder/migrations/0003_character_characterclass.py": ["/Character_Builder/models.py"]}
|
20,248
|
294486709/AMLMG2
|
refs/heads/master
|
/Model Generator.py
|
from PyQt5.QtGui import QPixmap, QDrag, QStandardItemModel, QStandardItem, QFont, QIcon, QCursor
from PyQt5.QtWidgets import QApplication, QMainWindow, QFileSystemModel, QMessageBox, QWidget, QLabel, \
QTabWidget, QListView, QListWidget, QLineEdit, QListWidgetItem, QAbstractItemView, QTableWidget,QTableWidgetItem, QHeaderView, QComboBox
from PyQt5.QtCore import QDir, QCoreApplication, Qt, QMimeData, QSize, QModelIndex
from MainForm import Ui_MainWindow
import sys
import Layers
import os
# Staging slot for the layer object most recently edited in the property
# table; consumed by NewListWidget.ChangeUpdate. Starts as an empty list but
# is rebound to a layer object after the first edit.
TempTarget = []
class TrackableWidgetItem(QLineEdit):
    """Editable text cell of the property table.

    Each instance edits one attribute (``Name``) of one layer object
    (``Data``); the attribute's declared kind in ``Data.attributes[Name]``
    ('INT', 'INT1' or 'NAME') selects the validation applied on change.
    """

    # Font shared by every property cell.
    PropertyFont = QFont('arial')
    PropertyFont.setPointSize(10)

    def __init__(self, Name, Data, ins=None):
        super(TrackableWidgetItem, self).__init__(ins)
        self.setFont(self.PropertyFont)
        self.setText('nA')  # placeholder until the caller sets the real value
        self.textChanged.connect(self.Changed)
        self.Name = Name  # attribute key being edited
        self.Data = Data  # layer object that owns the attribute

    def Changed(self):
        """Validate the edited text and store it under ``<Name>_value``."""
        global TempTarget
        # 'INT': accept any non-negative integer string.
        if self.Data.attributes[self.Name] == 'INT':
            if not self.text().isnumeric():
                A = QMessageBox.warning(self, 'Warning', 'Int only')
                self.setText('0')  # reset to a safe default
                return
            else:
                self.Data.attributes[self.Name + '_value'] = self.text()
                TempTarget = self.Data
                # NOTE(review): ChangeUpdate is only defined as a
                # NewListWidget method, not a module-level function —
                # confirm this unqualified call resolves at runtime.
                ChangeUpdate(ui.tabWidget.currentWidget().focusWidget())
        # 'INT1': integer restricted to 1..99 (used for the training ratio).
        if self.Data.attributes[self.Name] == 'INT1':
            print(self.text())
            if self.text().isnumeric():
                if int(self.text()) >= 100 or int(self.text()) < 1:
                    A = QMessageBox.warning(self, 'Warning', 'Int between 0 - 100')
                    self.setText('80')
                    return
                else:
                    self.Data.attributes[self.Name + '_value'] = self.text()
                    TempTarget = self.Data
                    ChangeUpdate(ui.tabWidget.currentWidget().focusWidget())
            else:
                A = QMessageBox.warning(self, 'Warning', 'Int between 0 - 100')
                self.setText('80')
                return
        # 'NAME': free text, stored as-is.
        if self.Data.attributes[self.Name] == 'NAME':
            self.Data.attributes[self.Name + '_value'] = self.text()
            TempTarget = self.Data
            ChangeUpdate(ui.tabWidget.currentWidget().focusWidget())
        print(self.Data.attributes[self.Name])
# Item Changed
class NewComboBox(QComboBox):
    """Drop-down editor cell for option-list layer attributes.

    ``target`` is the layer object and ``each`` the attribute key whose list
    of options is displayed; the chosen option's index is written back under
    ``<each>_value``.
    """

    # Font shared by every property cell.
    PropertyFont = QFont('arial')
    PropertyFont.setPointSize(10)

    def __init__(self, target, each, IndexCounter):
        super(NewComboBox, self).__init__(parent=None)
        targetValue = each + '_value'
        self.addItems(target.attributes[each])
        self.setCurrentIndex(target.attributes[targetValue])
        self.setFont(self.PropertyFont)
        self.data = target            # layer object that owns the attribute
        self.targetValue = targetValue  # key storing the selected index
        self.currentIndexChanged.connect(self.Update)
        self.IndexCounter = IndexCounter

    def Update(self):
        """Persist the newly selected option index on the layer object."""
        self.data.attributes[self.targetValue] = self.currentIndex()
        global TempTarget
        TempTarget = self.data
        # NOTE(review): ChangeUpdate is only defined as a NewListWidget
        # method — confirm this unqualified call resolves at runtime.
        ChangeUpdate(ui.tabWidget.currentWidget().focusWidget())

    def wheelEvent(self, event):
        """Only react to the mouse wheel when the combo box has focus.

        Bug fix: the original called ``QComboBox.wheelEvent(QWheelEvent)``
        without passing the instance, raising TypeError on every wheel event
        over a focused combo; the call must go through ``super()``.
        """
        if self.hasFocus():
            super(NewComboBox, self).wheelEvent(event)
class NewListWidget(QListWidget):
    """List widget representing one model tab; keeps the layer objects that
    back the visible layer items.

    Fixes vs. the original:
      * ``item_list`` was a class attribute, so every tab's widget shared a
        single layer list; it is now created per instance in ``__init__``.
      * clicking an empty area no longer raises IndexError in
        ``mousePressEvent`` (``selectedIndexes()`` can be empty).
      * stray debug prints removed.
    """

    # Shared, effectively stateless factory for layer objects.
    Factory = Layers.LayerFactory()
    PropertyFont = QFont('arial')
    PropertyFont.setPointSize(10)

    def __init__(self, parent=None):
        super(NewListWidget, self).__init__(parent)
        # Per-instance layer objects, parallel to the visible list items.
        self.item_list = []
        self.setAcceptDrops(True)
        self.setDragDropMode(2)  # QAbstractItemView.DragDrop

    def AddNewItem(self, Type):
        """Append a new layer object of the given type."""
        index = len(self.item_list)
        self.item_list.append(self.Factory.make(Type, index))

    def dropEvent(self, event):
        """Accept drags from the layer palette (copy) or reorder within (move)."""
        if event.mimeData().hasFormat('application/x-qabstractitemmodeldatalist'):
            data = event.mimeData()
            source_item = QStandardItemModel()
            source_item.dropMimeData(data, Qt.CopyAction, 0, 0, QModelIndex())
            Instruction = source_item.item(0, 0).text()
            if event.source() != self:
                # Dropped from the palette: add a new layer item + object.
                event.setDropAction(Qt.CopyAction)
                TempItem = QListWidgetItem()
                TempItem.setText(Instruction)
                TempItem.setTextAlignment(Qt.AlignCenter)
                self.addItem(TempItem)
                self.AddNewItem(Instruction)
            else:
                # Reordered within this widget: move the backing object too.
                event.setDropAction(Qt.MoveAction)
                PrevIndex = self.selectedIndexes()[0].row()
                super(NewListWidget, self).dropEvent(event)
                CurrentIndex = self.selectedIndexes()[0].row()
                self.ItemSwap(PrevIndex, CurrentIndex)
                self.UpdateIndex()
        else:
            event.ignore()

    def ItemSwap(self, Prev, Current):
        """Move the layer object from position Prev to position Current."""
        traget = self.item_list.pop(Prev)
        self.item_list.insert(Current, traget)

    def UpdateIndex(self):
        """Re-sync each layer object's 'index' attribute with its position."""
        for i in range(len(self.item_list)):
            self.item_list[i].attributes['index'] = i

    def mousePressEvent(self, QMouseEvent):
        super(NewListWidget, self).mousePressEvent(QMouseEvent)
        selected = self.selectedIndexes()
        if selected:  # guard: clicking empty space selects nothing
            self.ManageProperty(selected[0].row())

    def ManageProperty(self, index):
        """Populate the property table (ui.tableWidget) for the layer at *index*."""
        ui.tableWidget.setRowCount(0)
        ui.tableWidget.setFont(self.PropertyFont)
        ui.tableWidget.horizontalHeader().setDefaultSectionSize(120)
        ui.tableWidget.setColumnCount(2)
        ui.tableWidget.setHorizontalHeaderLabels(['Name', 'Value'])
        SkipList = ['type']
        target = self.item_list[index]
        RowCounter = 0
        IndexCounter = 0
        for each in target.attributes:
            # '<key>_value' entries and skipped keys get no row of their own.
            if each in SkipList or each[-6:] == '_value':
                IndexCounter += 1
                continue
            if each == 'index':
                # 'index' is shown read-only, 1-based, greyed out.
                target.attributes['index'] = self.currentIndex().row()
                tempItem = QTableWidgetItem('index')
                tempItem.setTextAlignment(Qt.AlignCenter)
                tempItem.setFont(self.PropertyFont)
                tempItem.setFlags(Qt.ItemIsEnabled)
                tempItem.setBackground(Qt.gray)
                ui.tableWidget.insertRow(RowCounter)
                ui.tableWidget.setItem(RowCounter, 0, tempItem)
                tempItem = QTableWidgetItem(str(self.currentIndex().row() + 1))
                tempItem.setFont(self.PropertyFont)
                tempItem.setFlags(Qt.ItemIsEnabled)
                tempItem.setBackground(Qt.gray)
                ui.tableWidget.setItem(RowCounter, 1, tempItem)
                RowCounter += 1
                continue
            if target.attributes[each] == 'NA':
                continue
            NameItem = QTableWidgetItem(each)
            NameItem.setTextAlignment(Qt.AlignCenter)
            NameItem.setFont(self.PropertyFont)
            NameItem.setFlags(Qt.ItemIsEnabled)
            NameItem.setBackground(Qt.gray)
            ui.tableWidget.insertRow(RowCounter)
            ui.tableWidget.setItem(RowCounter, 0, NameItem)
            if type(target.attributes[each]) == type([]):
                # Option list -> combo box editor.
                comboBox = NewComboBox(target, each, IndexCounter)
                comboBox.setFocusPolicy(Qt.StrongFocus)
                ui.tableWidget.setCellWidget(RowCounter, 1, comboBox)
            else:
                # Scalar -> editable line edit, pre-filled with current value.
                changeableWidget = TrackableWidgetItem(each, target)
                changeableWidget.setText(str(target.attributes[each+'_value']))
                ui.tableWidget.setCellWidget(RowCounter, 1, changeableWidget)
            RowCounter += 1
            IndexCounter += 1

    def focusWidget(self):
        # NOTE(review): overrides QWidget.focusWidget and returns None —
        # confirm callers relying on Qt's focusWidget() never hit this.
        print(self)

    def ChangeUpdate(self):
        """Copy the globally staged layer (TempTarget) back into item_list."""
        global TempTarget
        Index = TempTarget.attributes['index']
        self.item_list[Index] = TempTarget
class MainForm(Ui_MainWindow):
    """Main application logic on top of the generated Ui_MainWindow layout."""

    # NOTE(review): class-level mutable attributes are shared by all
    # instances; harmless while only one MainForm exists, but fragile.
    TabList = []      # open tab names
    TabListO = []     # tab page QWidget objects, parallel to TabList
    ListWidgetO = []  # NewListWidget created for each tab
    ItemFont = QFont('arial')
    ItemFont.setPointSize(20)

    # Form init
    def __init__(self, MainWindow):
        super(MainForm, self).setupUi(MainWindow)
        self.SetTreeWedgit()
        self.SetTabWidegt()
        self.SetListLayer()
        self.pushButton_2.clicked.connect(self.GenerateModel)

    def SetTreeWedgit(self):
        """Configure the file browser rooted at the current directory."""
        Model = QFileSystemModel()
        Model.setRootPath(QDir.currentPath())
        self.treeView.setModel(Model)
        self.treeView.setRootIndex(Model.index(QDir.currentPath()))
        self.treeView.setAnimated(False)
        self.treeView.setIndentation(20)
        self.treeView.setSortingEnabled(False)
        # Hide size/type/date columns; keep only the file name.
        self.treeView.hideColumn(1)
        self.treeView.hideColumn(2)
        self.treeView.hideColumn(3)
        self.treeView.doubleClicked.connect(self.TreeViewDoubleClicked)

    # get the full path of the double clicked item
    def TreeViewDoubleClicked(self):
        item = self.treeView.selectedIndexes()
        if item:
            item = item[0]
            TreeList = []
            # Walk up to the tree root collecting path components.
            while item.parent().data():
                TreeList.append(item.data())
                item = item.parent()
            BasePath = ''
            TreeList.reverse()
            for element in TreeList:
                BasePath += '/'
                BasePath += element
            _translate = QCoreApplication.translate
            self.AddTab(BasePath, TreeList[len(TreeList)-1])
            self.tabWidget.setTabText(self.tabWidget.indexOf(self.tabWidget), _translate("MainWindow", TreeList[len(TreeList)-1]))

    def tabWidgetDoubleClicked(self):
        """Double-clicking a tab closes it and drops its bookkeeping."""
        CurrentIndex = self.tabWidget.currentIndex()
        self.tabWidget.removeTab(CurrentIndex)
        self.TabList.pop(CurrentIndex)
        self.TabListO.pop(CurrentIndex)

    # check input file type
    def AddTab(self, FilePath, FileName):
        """Open FileName in a new tab unless its extension is wrong or it is already open."""
        if FileName[-3:] != '.py':
            QMessageBox.warning(self.treeView, 'Warning', 'Cannot open file:\n Wrong extension')
        elif FileName in self.TabList:
            QMessageBox.warning(self.treeView, 'Warning', 'Cannot open file:\n Instance existed')
        else:
            self.LoadFile(FilePath, FileName)

    # load new tab
    def LoadFile(self, FilePath, FileName):
        """Create a tab page with a drag-and-drop layer list for FileName."""
        temp = QWidget()
        temp.setAcceptDrops(False)
        self.tabWidget.addTab(temp, FileName)
        self.TabList.append(FileName)
        self.TabListO.append(temp)
        # add widget
        ScrollAreaName = FileName + '_SA'
        ListViewName = FileName + '_LV'
        Index = self.TabList.index(FileName)
        # target item
        self.tabWidget.widget(Index)
        # add scroll area to new tab
        print(temp)
        TempListWidget = NewListWidget(temp)
        # TempScrollArea.setWidgetResizable(True)
        TempListWidget.setMinimumSize(QSize(481, 654))
        TempListWidget.setMaximumSize(QSize(481, 654))
        # TempListView.setGeometry(0,0,200,100)
        TempListWidget.setObjectName(ScrollAreaName)
        TempListWidget.setAutoFillBackground(True)
        self.ListWidgetO.append(TempListWidget)
        TempListWidget.setAcceptDrops(True)
        TempListWidget.setDragDropMode(2)
        TempListWidget.setDefaultDropAction(0)
        TempListWidget.itemDoubleClicked.connect(self.RemoveItem)
        TempListWidget.setFont(self.ItemFont)
        TempListWidget.setItemAlignment(Qt.AlignHCenter)
        print(TempListWidget.acceptDrops())

    def RemoveItem(self, item):
        """Delete a layer item (and its backing object) after confirmation."""
        reply = QMessageBox.question(self.treeView, "Confirmation", "Do you really want to delete this layer?", QMessageBox.Yes | QMessageBox.No)
        if reply == 16384:  # 16384 is the integer value of QMessageBox.Yes
            parent = item.listWidget()
            index = parent.row(item)
            parent.takeItem(parent.row(item))
            parent.item_list.pop(index)

    def SetTabWidegt(self):
        """Wire tab closing and create the default 'New Model' tab."""
        self.tabWidget.tabBarDoubleClicked.connect(self.tabWidgetDoubleClicked)
        # # Ready Page
        # ##################################
        # FileName = 'New Model'
        # temp = QWidget()
        # temp.setAcceptDrops(True)
        # self.tabWidget.addTab(temp, FileName)
        # self.TabList.append(FileName)
        # self.TabListO.append(temp)
        # # add widget
        # ScrollAreaName = FileName + '_SA'
        # ListViewName = FileName + '_LV'
        # Index = self.TabList.index(FileName)
        # # target item
        # self.tabWidget.widget(Index)
        #
        # # add scroll area to new tab
        # print(temp)
        # TempScrollArea = QLabel(temp)
        # # TempScrollArea.setWidgetResizable(True)
        # TempScrollArea.setMinimumSize(QSize(200, 50))
        # TempScrollArea.setMaximumSize(QSize(200, 50))
        # TempScrollArea.setGeometry(150, 300, 0, 0)
        # TempScrollArea.setAutoFillBackground(True)
        # TempScrollArea.setAlignment(Qt.AlignCenter)
        # TempScrollArea.setObjectName(ScrollAreaName)
        # TempScrollArea.setAutoFillBackground(True)
        # TempScrollArea.setText('Ready')
        # ####################################
        FileName = 'New Model'
        temp = QWidget()
        temp.setAcceptDrops(False)
        self.tabWidget.addTab(temp, FileName)
        self.TabList.append(FileName)
        self.TabListO.append(temp)
        # add widget
        ScrollAreaName = FileName + '_SA'
        ListViewName = FileName + '_LV'
        Index = self.TabList.index(FileName)
        # target item
        self.tabWidget.widget(Index)
        # add scroll area to new tab
        print(temp)
        TempListWidget = NewListWidget(temp)
        # TempScrollArea.setWidgetResizable(True)
        TempListWidget.setMinimumSize(QSize(481, 654))
        TempListWidget.setMaximumSize(QSize(481, 654))
        # TempListView.setGeometry(0,0,200,100)
        TempListWidget.setObjectName(ScrollAreaName)
        TempListWidget.setAutoFillBackground(True)
        self.ListWidgetO.append(TempListWidget)
        TempListWidget.setAcceptDrops(True)
        TempListWidget.setDragDropMode(3)
        TempListWidget.setDefaultDropAction(0)
        TempListWidget.itemDoubleClicked.connect(self.RemoveItem)
        TempListWidget.setFont(self.ItemFont)
        # TempListWidget.setItemAlignment(Qt.AlignHCenter)
        print(TempListWidget.acceptDrops())

    def SetListLayer(self):
        """Fill the draggable layer palette on the right-hand side."""
        # Layers = ['Input', 'Conv1D', 'Conv2D', 'Conv3D', 'LSTM', 'Dense', 'RNN','Optimizer', 'Softmax', 'Output']
        Layers = ['Input', 'Conv', 'Pooling', 'Dense', 'Flatten', 'Compile']
        for layer in Layers:
            temp = QListWidgetItem(layer)
            # temp.setIcon(QIcon('File/Image/' + layer + '.jpg'))
            temp.setFont(self.ItemFont)
            temp.setTextAlignment(Qt.AlignHCenter)
            self.listWidget.addItem(temp)
        self.listWidget.setEditTriggers(QAbstractItemView.NoEditTriggers)
        self.listWidget.setDragEnabled(True)

    def GenerateModel(self):
        """Validate the current tab's layer list and write the model script."""
        try:
            targets = ui.tabWidget.currentWidget().focusWidget().item_list
        # NOTE(review): bare except — treats any failure (no focused list
        # widget, no item_list attribute) as "model not complete".
        except:
            A = QMessageBox.warning(ui.tabWidget, 'Warning', 'Model not complete')
            return
        if not self.ModelCheck(targets):
            A = QMessageBox.warning(ui.tabWidget, 'Warning', 'Model Invalid')
            return
        FileName = targets[0].attributes['model_name_value']
        if not self.ModelNameCheck(FileName):
            return
        self.GenKerasTF2(targets, FileName)

    def GenKerasTF2(self, targets, FileName):
        """Write a TF2/Keras script to FileName from the layer objects."""
        File = open(FileName, 'w')
        File.write('# This script is generated by AMLGM2, support TF2.0 only\n')
        File.write('import tensorflow as tf\n')
        File.write('from tensorflow.keras import layers, models\n')
        File.write('import numpy as np\n')
        File.write('# Model starts here\n')
        File.write('model = models.Sequential()\n')
        Generator = Layers.InstructionFactory()
        # targets[0] is the input layer; it only supplies file names/metadata.
        for index in range(1, len(targets)):
            temp = targets[index]
            statement = Generator.GenerateInstruction(temp, targets[0])
            File.write(statement)
        File.close()

    def ModelNameCheck(self, FileName):
        """Return True if it is OK to write FileName (asking before overwrite)."""
        if FileName in os.listdir():
            A = QMessageBox.warning(ui.tabWidget, 'Warning', 'File Existed, override?', QMessageBox.Yes | QMessageBox.No)
            if A == 16384:  # QMessageBox.Yes
                os.remove(FileName)
                return True
            else:
                return False
        return True

    def ModelCheck(self, targets):
        """Return True when the layer list starts with INPUT, ends with
        COMPILE, and has neither of those types in between."""
        dangerlist = ['INPUT', 'COMPILE']
        if len(targets) < 2:
            return False
        if targets[0].attributes['type'] != 'INPUT':
            return False
        if targets[len(targets)-1].attributes['type'] != 'COMPILE':
            return False
        for i in range(1, len(targets)-1):
            if targets[i].attributes['type'] in dangerlist:
                return False
        return True
if __name__ == "__main__":
    # Application entry point: build the Qt app, attach the form logic to
    # the main window, show it, and hand control to the Qt event loop.
    app = QApplication(sys.argv)
    MainWindow = QMainWindow()
    ui = MainForm(MainWindow)  # module-global 'ui' is referenced throughout
    MainWindow.show()
    sys.exit(app.exec_())
|
{"/Model Generator.py": ["/MainForm.py", "/Layers.py"]}
|
20,249
|
294486709/AMLMG2
|
refs/heads/master
|
/Layers.py
|
class CDLayer(object):
    """Shared attribute template for Conv and Dense layers.

    All settings live in a single ``attributes`` dict. A plain key holds the
    *editor kind* ('INT' for integer fields, a list of option strings for
    drop-downs, 'TYPE' placeholder), and the matching ``<key>_value`` key
    holds the current value (an int for 'INT' fields, an option index for
    lists).

    Bug fix vs. the original: ``bias_initializer`` / ``bias_initializer_value``
    were assigned twice with identical values; the duplicate was removed.
    Dict key order is unchanged (re-assignment never moved keys).
    """

    def __init__(self):
        # Option lists reused by several attributes; copied per attribute so
        # no two entries alias the same list object (matching the original,
        # which used separate literals).
        initializers = ['None', 'truncatednormal', 'ones', 'initializer', 'randomnormal',
                        'randomuniform', 'variancescaling', 'orthogonal',
                        'identity', 'constant', 'zeros', 'glort_normal', 'florot_uniform',
                        'be_normal', 'lecun_normal', 'he_uniform', 'lecun_uniform']
        regularizers = ['None', 'L1', 'L2']
        constraints = ['None', 'max_norm', 'non_neg', 'unit_norm', 'min_max_norm']
        self.attributes = {}
        self.attributes['type'] = 'TYPE'
        self.attributes['index'] = 'INT'
        self.attributes['units'] = 'INT'
        self.attributes['units_value'] = 16
        self.attributes['activation'] = ['relu', 'softmax', 'elu', 'selu', 'softplus', 'softsign', 'tanh',
                                         'hard_sigmoid', 'linear']
        self.attributes['activation_value'] = 0
        self.attributes['use_bias'] = ['False', 'True']
        self.attributes['use_bias_value'] = 0
        self.attributes['kernel_initializer'] = list(initializers)
        self.attributes['kernel_initializer_value'] = 0
        self.attributes['bias_initializer'] = list(initializers)
        self.attributes['bias_initializer_value'] = 0
        self.attributes['kernel_regularizer'] = list(regularizers)
        self.attributes['kernel_regularizer_value'] = 0
        self.attributes['activity_regularizer'] = list(regularizers)
        self.attributes['activity_regularizer_value'] = 0
        self.attributes['kernel_constraint'] = list(constraints)
        self.attributes['kernel_constraint_value'] = 0
        self.attributes['bias_constraint'] = list(constraints)
        self.attributes['bias_constraint_value'] = 0
        self.attributes['filters'] = 'INT'
        self.attributes['filters_value'] = 16
        self.attributes['kernel_size'] = 'INT'
        self.attributes['kernel_size_value'] = 16
        self.attributes['strides'] = 'INT'
        self.attributes['strides_value'] = 2
        self.attributes['padding'] = ['same', 'valid']
        self.attributes['padding_value'] = 0
class Dense(CDLayer):
    """Fully-connected layer: the convolution-only settings are disabled."""

    def __init__(self, index):
        super().__init__()
        self.attributes['type'] = 'Dense'
        # Convolution-specific settings do not apply to a Dense layer.
        for conv_only in ('filters', 'kernel_size', 'strides', 'padding'):
            self.attributes[conv_only] = 'NA'
        self.attributes['index'] = index
class Conv(CDLayer):
    """Convolution layer; adds a selectable 1D/2D/3D convolution type."""

    def __init__(self, index):
        super().__init__()
        attrs = self.attributes
        attrs['type'] = 'Conv'
        attrs['cnn_type'] = ['Conv1D', 'Conv2D', 'Conv3D']
        attrs['cnn_type_value'] = 1  # default selection: Conv2D
        attrs['units'] = 'NA'        # dense-only setting
        attrs['index'] = index
class InputLayer(object):
    """First layer of a model: training-data file names, split ratio and the
    generated script's file name.

    'NAME' / 'INT1' mark the editor kind; each ``<key>_value`` entry holds
    the current value.
    """

    def __init__(self, index):
        self.attributes = {
            'type': 'INPUT',
            'index': index,
            'input_x_file': 'NAME',
            'input_x_file_value': 'xtrain.npy',
            'input_y_file': 'NAME',
            'input_y_file_value': 'ytrain.npy',
            'training_ratio': 'INT1',
            'training_ratio_value': 80,
            'model_name': 'NAME',
            'model_name_value': 'model.py',
        }
class OutputLayer(object):
    """Output layer placeholder: only carries a user-editable output name."""

    def __init__(self, index):
        self.attributes = {
            'type': 'OUTPUT',
            'index': index,
            'output_name': 'NAME',   # free-text editor kind
            'output_name_value': '',
        }
class Pooling(object):
    """Pooling layer: selectable max/average pooling in 1, 2 or 3 dimensions."""

    def __init__(self, index):
        self.attributes = {
            'type': 'POOLING',
            'index': index,
            'pooling_type': ['MaxPooling1D', 'MaxPooling2D', 'MaxPooling3D',
                             'AveragePooling1D', 'AveragePooling2D', 'AveragePooling3D'],
            'pooling_type_value': 1,  # default selection: MaxPooling2D
            'pool_size': 'INT',
            'pool_size_value': 2,
            'strides': 'INT',
            'strides_value': 0,
            'padding': ['valid', 'same'],
            'padding_value': 0,
        }
class Compile(object):
    """Final layer: compile/fit settings (optimizer, loss, metrics, training)."""

    def __init__(self, index):
        self.attributes = {
            'type': 'COMPILE',
            'index': index,
            'optimizer': ['SGD', 'RMSprop', 'Adagrad', 'Adadelta', 'Adam', 'Adamax', 'Nadam'],
            'optimizer_value': 0,
            'loss': ['mean_squared_error', 'mean_absolute_error', 'mean_absolute_percentage_error',
                     'mean_squared_logarithmic_error', 'squared_hinge', 'hinge', 'categorical_hinge',
                     'logcosh', 'categorical_crossentropy', 'sparse_categorical_crossentropy',
                     'binary_crossentropy', 'kullback_leibler_divergence', 'poission', 'cosine_proximity'],
            'loss_value': 0,
            'metrics': ['accuracy', 'None'],
            'metrics_value': 0,
            'batch_size': 'INT',
            'batch_size_value': 16,
            'epoch': 'INT',
            'epoch_value': 5,
        }
class Flatten(object):
    """Flatten layer: no settings beyond its position in the model."""

    def __init__(self, index):
        self.attributes = {'type': 'FLATTEN', 'index': index}
class LayerFactory:
    """Instantiates layer objects from their palette label."""

    def __init__(self):
        # Kept for interface compatibility; nothing appends to it here.
        self.Product = []

    def make(self, Type, index):
        """Return a fresh layer object of kind *Type* positioned at *index*.

        Raises:
            TypeError: if *Type* is not a recognised layer name (same
            exception type as the original's bare ``raise TypeError``,
            now with a diagnostic message instead of a print).
        """
        # Label -> class dispatch table, built at call time so the class
        # names are resolved late, exactly as the original's if/elif did.
        registry = {
            'Dense': Dense,
            'Conv': Conv,
            'Input': InputLayer,
            'Output': OutputLayer,
            'Compile': Compile,
            'Pooling': Pooling,
            'Flatten': Flatten,
        }
        try:
            layer_cls = registry[Type]
        except KeyError:
            raise TypeError('Wrong input type: {!r}'.format(Type)) from None
        return layer_cls(index)
class InstructionFactory(object):
    """Translates layer objects into lines of a generated Keras/TF2 script."""

    def __init__(self):
        pass

    def PropertyManage(self, statement, target, skiplist):
        """Append keyword arguments for *target*'s remaining attributes to the
        partially built layer call in *statement*, then close the call.

        Skips keys in *skiplist*, '<key>_value' entries, attributes whose
        kind is 'NA', and options currently set to 'None'/'False'. Returns
        the completed statement ending with a double close-paren and newline.
        """
        attributes = target.attributes
        print('----------------')
        print(target)
        for i in attributes:
            print(i)
            if i not in skiplist and i[-6:] != '_value':
                if i == 'strides':
                    # strides is emitted as a tuple sized to the layer's
                    # dimensionality: conv rank first, pooling rank otherwise.
                    try:
                        matrix = self.MatrixGen(attributes['cnn_type_value']+1, attributes['strides_value'])
                    except KeyError:
                        try:
                            matrix = self.MatrixGen((attributes['pooling_type_value'] + 1) % 3, attributes['strides_value'])
                        except:
                            # NOTE(review): bare except; if neither *_value key
                            # exists, 'matrix' is undefined on the next line —
                            # confirm every layer with 'strides' has one.
                            pass
                    statement += ', {}={}'.format('strides', matrix)
                    continue
                if attributes[i] == 'NA':
                    continue
                # Option-list attribute: emit only non-default selections.
                if attributes[i][int(attributes[i + '_value'])] != 'None' and attributes[i][int(attributes[i + '_value'])] != 'False':
                    statement += ', {}=\'{}\''.format(i, attributes[i][int(attributes[i + '_value'])])
                else:
                    continue
            else:
                continue
        statement += '))\n'
        return statement

    def GenerateInstruction(self, temp, temp0):
        """Return the generated source line(s) for layer *temp*.

        *temp0* is the model's input layer; the COMPILE branch reads the
        training-data file names from it.
        """
        attributes = temp.attributes
        print(temp)
        # NOTE(review): type(x) == type(Cls(999)) builds a throwaway instance
        # just to compare classes; isinstance would be the idiomatic check.
        if type(temp) == type(Dense(999)):
            skiplist = ['type', 'index', 'units', 'activation', 'strides']
            statement = 'model.add(layers.Dense({}, activation=\'{}\''.format(attributes['units_value'], attributes['activation'][(int(attributes['activation_value']))])
            statement = self.PropertyManage(statement, temp, skiplist)
            return statement
        elif type(temp) == type(Flatten(999)):
            statement = 'model.add(layers.Flatten())\n'
            return statement
        elif type(temp) == type(Conv(999)):
            # kernel_size tuple is sized by the selected Conv1D/2D/3D rank.
            matrix = self.MatrixGen(attributes['cnn_type_value']+1, attributes['kernel_size_value'])
            statement = 'model.add(layers.{}({}, {}, activation=\'{}\''.format(attributes['cnn_type'][attributes['cnn_type_value']], attributes['filters_value'], matrix, attributes['activation'][(int(attributes['activation_value']))])
            skiplist = ['type', 'index', 'units', 'activation', 'kernel_size', 'filters', 'cnn_type', 'pooling_type']
            statement = self.PropertyManage(statement, temp, skiplist)
            return statement
        elif type(temp) == type(Pooling(999)):
            skiplist = ['type', 'index', 'units', 'activation', 'pool_size', 'pooling_type']
            # Pooling rank cycles 1..3 through the 6-entry pooling_type list.
            matrix = self.MatrixGen((attributes['pooling_type_value'] + 1)%3, attributes['pool_size_value'])
            statement = 'model.add(layers.{}({}'.format(attributes['pooling_type'][int(attributes['pooling_type_value'])], matrix)
            statement = self.PropertyManage(statement, temp, skiplist)
            return statement
        elif type(temp) == type(Compile(999)):
            # compile + data loading + fit lines close the generated script.
            statement = 'model.compile(optimizer=\'{}\', loss=\'{}\', metrics=[\'{}\'])\n'.format(attributes['optimizer'][int(attributes['optimizer_value'])],
                                                                                                  attributes['loss'][int(attributes['loss_value'])],
                                                                                                  attributes['metrics'][int(attributes['metrics_value'])])
            statement += 'xtrain = np.load({})\n'.format(temp0.attributes['input_x_file_value'])
            statement += 'ytrain = np.load({})\n'.format(temp0.attributes['input_y_file_value'])
            statement += 'model.fit({}, {}, epochs={}, batch_size={})\n'.format('xtrain', 'ytrain', temp.attributes['epoch_value'], temp.attributes['batch_size_value'])
            print(statement)
            return statement

    def MatrixGen(self, CnnType, Value):
        """Return the string '(Value, Value, ...)' with *CnnType* repetitions."""
        matrix = '('
        for i in range(CnnType):
            matrix += '{}, '.format(Value)
        matrix = matrix[:-2]
        matrix += ')'
        return matrix
|
{"/Model Generator.py": ["/MainForm.py", "/Layers.py"]}
|
20,250
|
294486709/AMLMG2
|
refs/heads/master
|
/MainForm.py
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'mainwindow.ui'
#
# Created by: PyQt5 UI code generator 5.12.2
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(1024, 768)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(MainWindow.sizePolicy().hasHeightForWidth())
MainWindow.setSizePolicy(sizePolicy)
MainWindow.setMinimumSize(QtCore.QSize(1024, 768))
MainWindow.setMaximumSize(QtCore.QSize(1024, 768))
self.centralWidget = QtWidgets.QWidget(MainWindow)
self.centralWidget.setObjectName("centralWidget")
self.groupBox = QtWidgets.QGroupBox(self.centralWidget)
self.groupBox.setGeometry(QtCore.QRect(10, 10, 211, 611))
self.groupBox.setObjectName("groupBox")
self.horizontalLayout = QtWidgets.QHBoxLayout(self.groupBox)
self.horizontalLayout.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout.setSpacing(0)
self.horizontalLayout.setObjectName("horizontalLayout")
self.treeView = QtWidgets.QTreeView(self.groupBox)
self.treeView.setObjectName("treeView")
self.horizontalLayout.addWidget(self.treeView)
self.groupBox_2 = QtWidgets.QGroupBox(self.centralWidget)
self.groupBox_2.setGeometry(QtCore.QRect(230, 10, 491, 611))
self.groupBox_2.setObjectName("groupBox_2")
self.horizontalLayout_2 = QtWidgets.QHBoxLayout(self.groupBox_2)
self.horizontalLayout_2.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout_2.setSpacing(0)
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
self.tabWidget = QtWidgets.QTabWidget(self.groupBox_2)
self.tabWidget.setObjectName("tabWidget")
self.horizontalLayout_2.addWidget(self.tabWidget)
self.groupBox_3 = QtWidgets.QGroupBox(self.centralWidget)
self.groupBox_3.setGeometry(QtCore.QRect(730, 10, 281, 711))
self.groupBox_3.setObjectName("groupBox_3")
self.groupBox_4 = QtWidgets.QGroupBox(self.groupBox_3)
self.groupBox_4.setGeometry(QtCore.QRect(10, 30, 271, 411))
self.groupBox_4.setObjectName("groupBox_4")
self.horizontalLayout_3 = QtWidgets.QHBoxLayout(self.groupBox_4)
self.horizontalLayout_3.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout_3.setSpacing(0)
self.horizontalLayout_3.setObjectName("horizontalLayout_3")
self.listWidget = QtWidgets.QListWidget(self.groupBox_4)
self.listWidget.setObjectName("listWidget")
self.horizontalLayout_3.addWidget(self.listWidget)
self.groupBox_5 = QtWidgets.QGroupBox(self.groupBox_3)
self.groupBox_5.setGeometry(QtCore.QRect(10, 440, 271, 261))
self.groupBox_5.setObjectName("groupBox_5")
self.horizontalLayout_4 = QtWidgets.QHBoxLayout(self.groupBox_5)
self.horizontalLayout_4.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout_4.setSpacing(0)
self.horizontalLayout_4.setObjectName("horizontalLayout_4")
self.tableWidget = QtWidgets.QTableWidget(self.groupBox_5)
self.tableWidget.setObjectName("tableWidget")
self.tableWidget.setColumnCount(0)
self.tableWidget.setRowCount(0)
self.horizontalLayout_4.addWidget(self.tableWidget)
self.pushButton = QtWidgets.QPushButton(self.centralWidget)
self.pushButton.setGeometry(QtCore.QRect(410, 640, 171, 61))
self.pushButton.setObjectName("pushButton")
self.pushButton_2 = QtWidgets.QPushButton(self.centralWidget)
self.pushButton_2.setGeometry(QtCore.QRect(90, 640, 171, 61))
self.pushButton_2.setObjectName("pushButton_2")
MainWindow.setCentralWidget(self.centralWidget)
self.menuBar = QtWidgets.QMenuBar(MainWindow)
self.menuBar.setGeometry(QtCore.QRect(0, 0, 1024, 22))
self.menuBar.setObjectName("menuBar")
self.menuFile = QtWidgets.QMenu(self.menuBar)
self.menuFile.setObjectName("menuFile")
MainWindow.setMenuBar(self.menuBar)
self.statusBar = QtWidgets.QStatusBar(MainWindow)
self.statusBar.setObjectName("statusBar")
MainWindow.setStatusBar(self.statusBar)
self.action_Open = QtWidgets.QAction(MainWindow)
self.action_Open.setObjectName("action_Open")
self.action_New = QtWidgets.QAction(MainWindow)
self.action_New.setObjectName("action_New")
self.action_Save = QtWidgets.QAction(MainWindow)
self.action_Save.setObjectName("action_Save")
self.actionSave_As = QtWidgets.QAction(MainWindow)
self.actionSave_As.setObjectName("actionSave_As")
self.action_Exit = QtWidgets.QAction(MainWindow)
self.action_Exit.setObjectName("action_Exit")
self.menuFile.addAction(self.action_Open)
self.menuFile.addAction(self.action_New)
self.menuFile.addAction(self.action_Save)
self.menuFile.addAction(self.actionSave_As)
self.menuFile.addAction(self.action_Exit)
self.menuBar.addAction(self.menuFile.menuAction())
self.retranslateUi(MainWindow)
self.tabWidget.setCurrentIndex(-1)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
    """Install all user-visible strings on the UI widgets.

    Strings are routed through QCoreApplication.translate so they can be
    localized at runtime.
    NOTE(review): this looks like pyuic-generated code from a Qt Designer
    .ui file -- prefer regenerating over hand-editing; confirm the source
    .ui file exists.
    """
    _translate = QtCore.QCoreApplication.translate
    MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow"))
    self.groupBox.setTitle(_translate("MainWindow", "Browser"))
    self.groupBox_2.setTitle(_translate("MainWindow", "Model"))
    self.groupBox_3.setTitle(_translate("MainWindow", "Layers"))
    self.groupBox_4.setTitle(_translate("MainWindow", "Layer Selection"))
    self.groupBox_5.setTitle(_translate("MainWindow", "Property"))
    self.pushButton.setText(_translate("MainWindow", "Save and Quit"))
    self.pushButton_2.setText(_translate("MainWindow", "Generate Model"))
    self.menuFile.setTitle(_translate("MainWindow", "&File"))
    self.action_Open.setText(_translate("MainWindow", "&Open"))
    self.action_New.setText(_translate("MainWindow", "&New"))
    self.action_Save.setText(_translate("MainWindow", "&Save"))
    self.actionSave_As.setText(_translate("MainWindow", "Save &As"))
    self.action_Exit.setText(_translate("MainWindow", "&Exit"))
|
{"/Model Generator.py": ["/MainForm.py", "/Layers.py"]}
|
20,256
|
monoper/BlockchainDB
|
refs/heads/main
|
/example/app/api/blockchain/__init__.py
|
"""Grouping for blockchain related things"""
from .blockchain import Blockchain as BlockchainDb
from .api import blockchain_api
|
{"/example/app/api/blockchain/__init__.py": ["/example/app/api/blockchain/api/__init__.py"], "/example/app/api/provider_models.py": ["/example/app/api/common_models.py"], "/example/app/main.py": ["/example/app/api/__init__.py"], "/example/app/api/client_models.py": ["/example/app/api/common_models.py"], "/src/blockchain/api/blockchain_routes.py": ["/src/blockchain/blockchain.py"], "/example/app/api/provider_routes.py": ["/example/app/api/provider_models.py", "/example/app/api/common_models.py", "/example/app/api/blockchain/__init__.py", "/example/app/api/util.py", "/example/app/api/client_models.py"], "/example/app/api/auth_routes.py": ["/example/app/api/blockchain/__init__.py", "/example/app/api/client_models.py", "/example/app/api/provider_models.py", "/example/app/api/util.py", "/example/app/api/auth_models.py"], "/example/app/api/client_routes.py": ["/example/app/api/client_models.py", "/example/app/api/provider_models.py", "/example/app/api/common_models.py", "/example/app/api/blockchain/__init__.py", "/example/app/api/util.py"], "/example/app/api/blockchain/mongo.py": ["/example/app/api/blockchain/models.py"], "/example/app/dependencies.py": ["/example/app/api/blockchain/__init__.py"], "/example/app/api/__init__.py": ["/example/app/api/provider_routes.py", "/example/app/api/client_routes.py", "/example/app/api/auth_routes.py", "/example/app/api/blockchain/__init__.py"], "/example/app/api/blockchain/models.py": ["/example/app/api/util.py"], "/example/app/api/auth_models.py": ["/example/app/api/common_models.py", "/example/app/api/provider_models.py"], "/example/app/api/util.py": ["/example/app/api/common_models.py", "/example/app/api/provider_models.py", "/example/app/api/client_models.py"]}
|
20,257
|
monoper/BlockchainDB
|
refs/heads/main
|
/example/app/api/provider_models.py
|
from typing import List
from datetime import datetime
from pydantic import BaseModel, EmailStr
from .common_models import Address, Name, PhoneNumbers, ProvidableTreatment
class Provider(BaseModel):
    """Pydantic model of a provider record stored under the 'Provider' collection."""
    providerId: str  # unique provider identifier
    name: Name
    phoneNumbers: PhoneNumbers
    addresses: List[Address]  # a provider may have several addresses
    dateOfBirth: datetime
    email: EmailStr  # pydantic-validated email address
    providableTreatments: List[ProvidableTreatment]  # treatments this provider offers
class ProviderSearchResult(BaseModel):
    """Search projection of Provider: same fields minus dateOfBirth and email."""
    providerId: str  # unique provider identifier
    name: Name
    phoneNumbers: PhoneNumbers
    addresses: List[Address]
    providableTreatments: List[ProvidableTreatment]
|
{"/example/app/api/blockchain/__init__.py": ["/example/app/api/blockchain/api/__init__.py"], "/example/app/api/provider_models.py": ["/example/app/api/common_models.py"], "/example/app/main.py": ["/example/app/api/__init__.py"], "/example/app/api/client_models.py": ["/example/app/api/common_models.py"], "/src/blockchain/api/blockchain_routes.py": ["/src/blockchain/blockchain.py"], "/example/app/api/provider_routes.py": ["/example/app/api/provider_models.py", "/example/app/api/common_models.py", "/example/app/api/blockchain/__init__.py", "/example/app/api/util.py", "/example/app/api/client_models.py"], "/example/app/api/auth_routes.py": ["/example/app/api/blockchain/__init__.py", "/example/app/api/client_models.py", "/example/app/api/provider_models.py", "/example/app/api/util.py", "/example/app/api/auth_models.py"], "/example/app/api/client_routes.py": ["/example/app/api/client_models.py", "/example/app/api/provider_models.py", "/example/app/api/common_models.py", "/example/app/api/blockchain/__init__.py", "/example/app/api/util.py"], "/example/app/api/blockchain/mongo.py": ["/example/app/api/blockchain/models.py"], "/example/app/dependencies.py": ["/example/app/api/blockchain/__init__.py"], "/example/app/api/__init__.py": ["/example/app/api/provider_routes.py", "/example/app/api/client_routes.py", "/example/app/api/auth_routes.py", "/example/app/api/blockchain/__init__.py"], "/example/app/api/blockchain/models.py": ["/example/app/api/util.py"], "/example/app/api/auth_models.py": ["/example/app/api/common_models.py", "/example/app/api/provider_models.py"], "/example/app/api/util.py": ["/example/app/api/common_models.py", "/example/app/api/provider_models.py", "/example/app/api/client_models.py"]}
|
20,258
|
monoper/BlockchainDB
|
refs/heads/main
|
/example/test/load_testing/load_testing.py
|
import asyncio
import json
import uuid
import requests
async def create_client_request():
    """Load test: create one client, then hammer its update endpoint.

    Sends one POST to create the client, then 500 rounds of 11 gathered
    PUT requests against the same resource, and finally prints the
    original POST response.

    NOTE(review): ``requests`` is a blocking library, so the gathered
    coroutines still execute sequentially on the event loop; switch to an
    async HTTP client (or ``asyncio.to_thread``) for real concurrency.
    """
    client_id = str(uuid.uuid4())
    url = 'http://localhost:5000/client/{}'.format(client_id)
    print('Using url: {}'.format(url))
    json_data = build_json_client_payload(client_id)
    resp = requests.post(url, json=json_data)
    for _ in range(500):
        # Replaces 11 copy-pasted put_request(...) arguments with a
        # comprehension; same 11 calls per round.
        await asyncio.gather(*(put_request(url, json_data) for _ in range(11)))
    print(resp)
def build_json_client_payload(client_id):
    """Return the canned client JSON payload, stamped with *client_id*."""
    data = '{"name":{"firstName":"W","middleName":"A","lastName":"H"},"phoneNumbers":{"home":"1234567890","mobile":"1234567890","work":"1234567890"},"address":{"unit":1,"streetAddress":"123 fake street","city":"Fake City","province":0,"country":"Canada","postalCode":"l1l1l1"},"dateOfBirth":"2021-01-29 23:50:58.272613","email":"a@a.com"}'
    return {**json.loads(data), 'clientId': client_id}
async def put_request(url, json_payload):
    """PUT *json_payload* to *url* and return the HTTP status code."""
    return requests.put(url, json=json_payload).status_code
# Script entry point: run the load test to completion.
asyncio.run(create_client_request())
|
{"/example/app/api/blockchain/__init__.py": ["/example/app/api/blockchain/api/__init__.py"], "/example/app/api/provider_models.py": ["/example/app/api/common_models.py"], "/example/app/main.py": ["/example/app/api/__init__.py"], "/example/app/api/client_models.py": ["/example/app/api/common_models.py"], "/src/blockchain/api/blockchain_routes.py": ["/src/blockchain/blockchain.py"], "/example/app/api/provider_routes.py": ["/example/app/api/provider_models.py", "/example/app/api/common_models.py", "/example/app/api/blockchain/__init__.py", "/example/app/api/util.py", "/example/app/api/client_models.py"], "/example/app/api/auth_routes.py": ["/example/app/api/blockchain/__init__.py", "/example/app/api/client_models.py", "/example/app/api/provider_models.py", "/example/app/api/util.py", "/example/app/api/auth_models.py"], "/example/app/api/client_routes.py": ["/example/app/api/client_models.py", "/example/app/api/provider_models.py", "/example/app/api/common_models.py", "/example/app/api/blockchain/__init__.py", "/example/app/api/util.py"], "/example/app/api/blockchain/mongo.py": ["/example/app/api/blockchain/models.py"], "/example/app/dependencies.py": ["/example/app/api/blockchain/__init__.py"], "/example/app/api/__init__.py": ["/example/app/api/provider_routes.py", "/example/app/api/client_routes.py", "/example/app/api/auth_routes.py", "/example/app/api/blockchain/__init__.py"], "/example/app/api/blockchain/models.py": ["/example/app/api/util.py"], "/example/app/api/auth_models.py": ["/example/app/api/common_models.py", "/example/app/api/provider_models.py"], "/example/app/api/util.py": ["/example/app/api/common_models.py", "/example/app/api/provider_models.py", "/example/app/api/client_models.py"]}
|
20,259
|
monoper/BlockchainDB
|
refs/heads/main
|
/example/app/main.py
|
"""Main entry point for application"""
import os
import logging
from starlette.middleware import Middleware
from starlette.middleware.cors import CORSMiddleware
from fastapi import FastAPI, status, Request
from .api import auth_api, client_api, provider_api, blockchain_api
import uuid
logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.DEBUG)
logging.info("Starting")
# Default to no extra middleware; the permissive CORS policy below is only
# installed for local development. Previously `middleware` was bound ONLY
# inside the `if`, so any non-development ENVIRONMENT crashed with a
# NameError at FastAPI(middleware=middleware).
middleware = []
if 'ENVIRONMENT' not in os.environ or os.environ['ENVIRONMENT'] == 'development':
    logging.info("Development CORS policy enabled")
    middleware = [Middleware(
        CORSMiddleware,
        allow_origins=['http://localhost:3000', 'http://localhost:*', 'https://app.dev.blockmedisolutions.com'],
        allow_credentials=True,
        allow_methods=['*'],
        allow_headers=['*']
    )]
app = FastAPI(middleware=middleware)
@app.middleware("http")
async def add_correlation_header(request: Request, call_next):
correlation_id = str(uuid.uuid4())
response = await call_next(request)
response.headers["X-Correlation-Id"] = correlation_id
return response
# Mount the feature routers (auth, client, provider, blockchain) on the app.
app.include_router(auth_api)
app.include_router(client_api)
app.include_router(provider_api)
app.include_router(blockchain_api)
@app.get('/api/health', status_code=status.HTTP_200_OK)
def health():
    """Health check endpoint for use by ECS; always returns True with HTTP 200."""
    return True
|
{"/example/app/api/blockchain/__init__.py": ["/example/app/api/blockchain/api/__init__.py"], "/example/app/api/provider_models.py": ["/example/app/api/common_models.py"], "/example/app/main.py": ["/example/app/api/__init__.py"], "/example/app/api/client_models.py": ["/example/app/api/common_models.py"], "/src/blockchain/api/blockchain_routes.py": ["/src/blockchain/blockchain.py"], "/example/app/api/provider_routes.py": ["/example/app/api/provider_models.py", "/example/app/api/common_models.py", "/example/app/api/blockchain/__init__.py", "/example/app/api/util.py", "/example/app/api/client_models.py"], "/example/app/api/auth_routes.py": ["/example/app/api/blockchain/__init__.py", "/example/app/api/client_models.py", "/example/app/api/provider_models.py", "/example/app/api/util.py", "/example/app/api/auth_models.py"], "/example/app/api/client_routes.py": ["/example/app/api/client_models.py", "/example/app/api/provider_models.py", "/example/app/api/common_models.py", "/example/app/api/blockchain/__init__.py", "/example/app/api/util.py"], "/example/app/api/blockchain/mongo.py": ["/example/app/api/blockchain/models.py"], "/example/app/dependencies.py": ["/example/app/api/blockchain/__init__.py"], "/example/app/api/__init__.py": ["/example/app/api/provider_routes.py", "/example/app/api/client_routes.py", "/example/app/api/auth_routes.py", "/example/app/api/blockchain/__init__.py"], "/example/app/api/blockchain/models.py": ["/example/app/api/util.py"], "/example/app/api/auth_models.py": ["/example/app/api/common_models.py", "/example/app/api/provider_models.py"], "/example/app/api/util.py": ["/example/app/api/common_models.py", "/example/app/api/provider_models.py", "/example/app/api/client_models.py"]}
|
20,260
|
monoper/BlockchainDB
|
refs/heads/main
|
/example/test/load_testing/async_test.py
|
import asyncio
async def foo(n):
    """Return *n* incremented by one."""
    return 1 + n
async def main():
    """Run foo over 7..10 concurrently; print and return the gathered results."""
    outcome = await asyncio.gather(*(foo(i) for i in range(7, 11)))
    print(outcome)
    return outcome
# Script entry point: run the demo and echo the gathered results again.
res = asyncio.run(main())
print(res)
|
{"/example/app/api/blockchain/__init__.py": ["/example/app/api/blockchain/api/__init__.py"], "/example/app/api/provider_models.py": ["/example/app/api/common_models.py"], "/example/app/main.py": ["/example/app/api/__init__.py"], "/example/app/api/client_models.py": ["/example/app/api/common_models.py"], "/src/blockchain/api/blockchain_routes.py": ["/src/blockchain/blockchain.py"], "/example/app/api/provider_routes.py": ["/example/app/api/provider_models.py", "/example/app/api/common_models.py", "/example/app/api/blockchain/__init__.py", "/example/app/api/util.py", "/example/app/api/client_models.py"], "/example/app/api/auth_routes.py": ["/example/app/api/blockchain/__init__.py", "/example/app/api/client_models.py", "/example/app/api/provider_models.py", "/example/app/api/util.py", "/example/app/api/auth_models.py"], "/example/app/api/client_routes.py": ["/example/app/api/client_models.py", "/example/app/api/provider_models.py", "/example/app/api/common_models.py", "/example/app/api/blockchain/__init__.py", "/example/app/api/util.py"], "/example/app/api/blockchain/mongo.py": ["/example/app/api/blockchain/models.py"], "/example/app/dependencies.py": ["/example/app/api/blockchain/__init__.py"], "/example/app/api/__init__.py": ["/example/app/api/provider_routes.py", "/example/app/api/client_routes.py", "/example/app/api/auth_routes.py", "/example/app/api/blockchain/__init__.py"], "/example/app/api/blockchain/models.py": ["/example/app/api/util.py"], "/example/app/api/auth_models.py": ["/example/app/api/common_models.py", "/example/app/api/provider_models.py"], "/example/app/api/util.py": ["/example/app/api/common_models.py", "/example/app/api/provider_models.py", "/example/app/api/client_models.py"]}
|
20,261
|
monoper/BlockchainDB
|
refs/heads/main
|
/example/app/api/client_models.py
|
from typing import List
from datetime import datetime
from pydantic import BaseModel, EmailStr
from .common_models import Address, Name, PhoneNumbers
class LinkedProvider(BaseModel):
    """A provider linked to a client, with an access flag."""
    providerId: str  # id of the linked provider
    providerName: str  # display name of the linked provider
    hasAccess: bool  # whether the provider currently has access to the client
class Client(BaseModel):
    """Pydantic model of a client record stored under the 'Client' collection."""
    clientId: str  # unique client identifier
    name: Name
    phoneNumbers: PhoneNumbers
    address: Address  # a client has a single address (unlike Provider)
    dateOfBirth: datetime
    email: EmailStr  # pydantic-validated email address
    linkedProviders: List[LinkedProvider] = []  # providers granted a link to this client
|
{"/example/app/api/blockchain/__init__.py": ["/example/app/api/blockchain/api/__init__.py"], "/example/app/api/provider_models.py": ["/example/app/api/common_models.py"], "/example/app/main.py": ["/example/app/api/__init__.py"], "/example/app/api/client_models.py": ["/example/app/api/common_models.py"], "/src/blockchain/api/blockchain_routes.py": ["/src/blockchain/blockchain.py"], "/example/app/api/provider_routes.py": ["/example/app/api/provider_models.py", "/example/app/api/common_models.py", "/example/app/api/blockchain/__init__.py", "/example/app/api/util.py", "/example/app/api/client_models.py"], "/example/app/api/auth_routes.py": ["/example/app/api/blockchain/__init__.py", "/example/app/api/client_models.py", "/example/app/api/provider_models.py", "/example/app/api/util.py", "/example/app/api/auth_models.py"], "/example/app/api/client_routes.py": ["/example/app/api/client_models.py", "/example/app/api/provider_models.py", "/example/app/api/common_models.py", "/example/app/api/blockchain/__init__.py", "/example/app/api/util.py"], "/example/app/api/blockchain/mongo.py": ["/example/app/api/blockchain/models.py"], "/example/app/dependencies.py": ["/example/app/api/blockchain/__init__.py"], "/example/app/api/__init__.py": ["/example/app/api/provider_routes.py", "/example/app/api/client_routes.py", "/example/app/api/auth_routes.py", "/example/app/api/blockchain/__init__.py"], "/example/app/api/blockchain/models.py": ["/example/app/api/util.py"], "/example/app/api/auth_models.py": ["/example/app/api/common_models.py", "/example/app/api/provider_models.py"], "/example/app/api/util.py": ["/example/app/api/common_models.py", "/example/app/api/provider_models.py", "/example/app/api/client_models.py"]}
|
20,262
|
monoper/BlockchainDB
|
refs/heads/main
|
/src/blockchain/api/blockchain_routes.py
|
"""Routes that are related to the actual blockchain"""
from fastapi import Depends, APIRouter, status
from ..blockchain import Blockchain as BlockchainDb
from ..models import ProposedBlock
# Router for blockchain maintenance endpoints under /api/blockchain.
api = APIRouter(
    prefix="/api/blockchain",
    tags=["blockchain"],
    dependencies=[Depends(BlockchainDb)],
    responses={404: {"description": "Not found"}},
)
@api.get("/health")
def get_client(database: BlockchainDb = Depends()):
"""Endpoint to validate the blockchain as a whole"""
return 200 if database.validate() else 400
@api.post("/validate-block", status_code=status.HTTP_200_OK)
def update_client(proposed_block: ProposedBlock, database: BlockchainDb = Depends()):
"""Endpoint to validate a single block"""
return database.get_proposed_block_hash(proposed_block)
|
{"/example/app/api/blockchain/__init__.py": ["/example/app/api/blockchain/api/__init__.py"], "/example/app/api/provider_models.py": ["/example/app/api/common_models.py"], "/example/app/main.py": ["/example/app/api/__init__.py"], "/example/app/api/client_models.py": ["/example/app/api/common_models.py"], "/src/blockchain/api/blockchain_routes.py": ["/src/blockchain/blockchain.py"], "/example/app/api/provider_routes.py": ["/example/app/api/provider_models.py", "/example/app/api/common_models.py", "/example/app/api/blockchain/__init__.py", "/example/app/api/util.py", "/example/app/api/client_models.py"], "/example/app/api/auth_routes.py": ["/example/app/api/blockchain/__init__.py", "/example/app/api/client_models.py", "/example/app/api/provider_models.py", "/example/app/api/util.py", "/example/app/api/auth_models.py"], "/example/app/api/client_routes.py": ["/example/app/api/client_models.py", "/example/app/api/provider_models.py", "/example/app/api/common_models.py", "/example/app/api/blockchain/__init__.py", "/example/app/api/util.py"], "/example/app/api/blockchain/mongo.py": ["/example/app/api/blockchain/models.py"], "/example/app/dependencies.py": ["/example/app/api/blockchain/__init__.py"], "/example/app/api/__init__.py": ["/example/app/api/provider_routes.py", "/example/app/api/client_routes.py", "/example/app/api/auth_routes.py", "/example/app/api/blockchain/__init__.py"], "/example/app/api/blockchain/models.py": ["/example/app/api/util.py"], "/example/app/api/auth_models.py": ["/example/app/api/common_models.py", "/example/app/api/provider_models.py"], "/example/app/api/util.py": ["/example/app/api/common_models.py", "/example/app/api/provider_models.py", "/example/app/api/client_models.py"]}
|
20,263
|
monoper/BlockchainDB
|
refs/heads/main
|
/example/app/api/provider_routes.py
|
"""Routes for provider api"""
import uuid
from typing import Optional
from fastapi import Depends, APIRouter, status
from fastapi.exceptions import HTTPException
from .provider_models import Provider, ProviderSearchResult
from .common_models import Appointment, Provinces, ProvidableTreatment, AppointmentStatus, Address
from .blockchain import BlockchainDb
from .util import verify_auth_header
from .client_models import Client, LinkedProvider
# Router for all /api/provider endpoints; every route requires both a
# blockchain-backed database instance and a verified auth header.
api = APIRouter(
    prefix="/api/provider",
    tags=["providers"],
    dependencies=[Depends(BlockchainDb), Depends(verify_auth_header)],
    responses={404: {"description": "Not found"}},
)
@api.get("/{provider_id}", response_model=Provider, status_code=status.HTTP_200_OK)
def get_provider(provider_id: str, database: BlockchainDb = Depends()):
"""Returns a single provider"""
result = database.find_one('Provider', {'providerId': provider_id})
if result is None:
raise HTTPException(status_code=404, detail='Provider not found')
return Provider(**result)
@api.put("/{provider_id}", status_code=status.HTTP_200_OK)
def update_provider(provider_id: str, provider: Provider, database: BlockchainDb = Depends()):
"""Updates a provider"""
if provider.providerId != provider_id:
raise HTTPException(status_code=400,
detail='Provider id in query parameter doesn\'t match payload')
database.commit_transaction(provider, 'EDIT', 'Provider', 'providerId', provider_id)
@api.get("/{provider_id}/providable-treatments", status_code=status.HTTP_200_OK)
def get_provider_providable_treatments(provider_id: str, database: BlockchainDb = Depends()):
"""Gets the treatments a provider can provide to a client"""
result = database.find_one('Provider', {'providerId': provider_id})
if result is None:
raise HTTPException(status_code=404, detail='Provider not found')
return Provider(**result).providableTreatments
@api.post("/{provider_id}/providable-treatments", status_code=status.HTTP_200_OK)
def add_provider_providable_treatment(provider_id: str, providableTreatment: ProvidableTreatment,
database: BlockchainDb = Depends()):
"""Adds a treatments that a provider can provide to a client"""
result = database.find_one('Provider', {'providerId': provider_id})
if result is None:
raise HTTPException(status_code=404, detail='Provider not found')
provider = Provider(**result)
for existing_providable_treatment in provider.providableTreatments:
if existing_providable_treatment.name.lower() == providableTreatment.name.lower():
raise HTTPException(status_code=400,
detail=f'Providable treatment: \
{providableTreatment.name} already exists')
providableTreatment.providableTreatmentId = str(uuid.uuid4())
provider.providableTreatments.append(providableTreatment)
database.commit_transaction(provider, 'EDIT', 'Provider', 'providerId', provider_id)
@api.post("/{provider_id}/address", status_code=status.HTTP_200_OK)
def add_provider_address(provider_id: str, address: Address,
database: BlockchainDb = Depends()):
"""
Adds provider address
"""
result = database.find_one('Provider', {'providerId': provider_id})
if result is None:
raise HTTPException(status_code=404, detail='Provider not found')
provider = Provider(**result)
address.addressId = str(uuid.uuid4())
provider.addresses.append(address)
database.commit_transaction(provider, 'EDIT', 'Provider', 'providerId', provider_id)
@api.delete("/{provider_id}/providable-treatments/{providable_treatment_id}", status_code=status.HTTP_200_OK)
def delete_provider_providable_treatment(provider_id: str,
providable_treatment_id: str,
database: BlockchainDb = Depends()):
"""Adds a treatments that a provider can provide to a client"""
result = database.find_one('Provider', {'providerId': provider_id})
if result is None:
raise HTTPException(status_code=404, detail='Provider not found')
provider = Provider(**result)
providable_treatments = []
for existing_providable_treatment in provider.providableTreatments:
if existing_providable_treatment.providableTreatmentId != providable_treatment_id:
providable_treatments.append(existing_providable_treatment)
provider.providableTreatments = providable_treatments
database.commit_transaction(provider, 'EDIT', 'Provider', 'providerId', provider_id)
@api.delete("/{provider_id}/address/{address_id}", status_code=status.HTTP_200_OK)
def delete_provider_address(provider_id: str,
address_id: str,
database: BlockchainDb = Depends()):
"""
Removes an address from a provider
"""
result = database.find_one('Provider', {'providerId': provider_id})
if result is None:
raise HTTPException(status_code=404, detail='Provider not found')
provider = Provider(**result)
addresses = []
for existing_address in provider.addresses:
if existing_address.addressId != address_id:
addresses.append(existing_address)
provider.addresses = addresses
database.commit_transaction(provider, 'EDIT', 'Provider', 'providerId', provider_id)
@api.get("/{provider_id}/appointments", status_code=status.HTTP_200_OK)
def get_provider_appointments(provider_id: str, database: BlockchainDb = Depends()):
"""Gets appointments that are assigned to a provider"""
result = database.find('Appointment', {'providerId': provider_id})
if result is None:
return {}
return result
@api.get("/{provider_id}/appointments/{appointment_id}", status_code=status.HTTP_200_OK)
def get_provider_appointment(provider_id: str, appointment_id: str,
database: BlockchainDb = Depends()):
"""Gets a single appoint that is assigned to a provider"""
result = database.find_one('Appointment',
{'providerId': provider_id, 'appointmentId': appointment_id})
if result is None:
raise HTTPException(status_code=404, detail='Appointment not found')
return result
@api.put("/{provider_id}/appointments/{appointment_id}/accept", status_code=status.HTTP_200_OK)
def accept_provider_appointment(provider_id: str,
appointment_id: str,
database: BlockchainDb = Depends()):
"""Accepts an appointment that is assigned to a provider"""
appointment = database.find_one('Appointment',
{'providerId': provider_id, 'appointmentId': appointment_id})
if appointment is None:
raise HTTPException(status_code=404, detail='Appointment not found')
updated_appointment = Appointment(**appointment)
updated_appointment.status = AppointmentStatus.Accepted
#need to add protect so that only 1 create block can exist for a given ID
result = database.commit_transaction(updated_appointment, 'EDIT',
'Appointment', 'appointmentId', appointment_id)
return result
@api.put("/{provider_id}/appointments/{appointment_id}/reject", status_code=status.HTTP_200_OK)
def reject_provider_appointment(provider_id: str,
appointment_id: str,
database: BlockchainDb = Depends()):
"""Rejects an appointment that is assigned to a provider"""
appointment = database.find_one('Appointment',
{'providerId': provider_id, 'appointmentId': appointment_id})
if appointment is None:
raise HTTPException(status_code=404, detail='Appointment not found')
updated_appointment = Appointment(**appointment)
updated_appointment.status = AppointmentStatus.Rejected
#need to add protect so that only 1 create block can exist for a given ID
result = database.commit_transaction(updated_appointment, 'EDIT',
'Appointment', 'appointmentId', appointment_id)
return result
@api.put("/{provider_id}/appointments/{appointment_id}", status_code=status.HTTP_200_OK)
def update_provider_appointment(provider_id: str,
appointment_id: str,
appointment: Appointment,
database: BlockchainDb = Depends()):
if appointment.providerId != provider_id or appointment.appointmentId != appointment_id:
raise HTTPException(status_code=400,
detail='Provider id in query parameter doesn\'t match payload')
existing_appointment = Appointment(**database.find_one('Appointment',
{'providerId': provider_id, 'appointmentId': appointment_id}))
if existing_appointment.status == AppointmentStatus.Completed \
or existing_appointment.status == AppointmentStatus.Rejected:
raise HTTPException(status_code=400,
detail='Cannot update a completed or rejected appointment')
#need to add protect so that only 1 create block can exist for a given ID
result = database.commit_transaction(appointment, 'EDIT',
'Appointment', 'appointmentId', appointment_id)
related_client_result = database.find_one('Client', { 'clientId': appointment.clientId})
if related_client_result is None:
raise HTTPException(status_code=404, detail='Client related to appointment not found')
related_client = Client(**related_client_result)
if not any(linked_provider.providerId == appointment.providerId
for linked_provider in related_client.linkedProviders):
raise HTTPException(status_code=403)
if result is None:
raise HTTPException(status_code=404, detail='Appointment not found')
return result
@api.get("/search/available", status_code=status.HTTP_200_OK)
def search_provider(name: Optional[str]=None, city: Optional[str]=None,
province: Optional[Provinces]=None, database: BlockchainDb = Depends()):
"""Searches for a provider based on nothing, a name, a city or a province"""
query = {}
if name is not None:
name_query = { "name.firstName": { '$regex' : f'^{name}'} }
query = {**name_query}
#
if city is not None:
city_query = { "address.city": { '$regex' : f'^{city}'} }
query = {**query, **city_query}
if province is not None:
province_query = { "address.province": province }
query = {**query, **province_query}
raw_results = database.find('Provider', query)
results = []
for raw_result in raw_results:
results.append(ProviderSearchResult(**raw_result))
return results
|
{"/example/app/api/blockchain/__init__.py": ["/example/app/api/blockchain/api/__init__.py"], "/example/app/api/provider_models.py": ["/example/app/api/common_models.py"], "/example/app/main.py": ["/example/app/api/__init__.py"], "/example/app/api/client_models.py": ["/example/app/api/common_models.py"], "/src/blockchain/api/blockchain_routes.py": ["/src/blockchain/blockchain.py"], "/example/app/api/provider_routes.py": ["/example/app/api/provider_models.py", "/example/app/api/common_models.py", "/example/app/api/blockchain/__init__.py", "/example/app/api/util.py", "/example/app/api/client_models.py"], "/example/app/api/auth_routes.py": ["/example/app/api/blockchain/__init__.py", "/example/app/api/client_models.py", "/example/app/api/provider_models.py", "/example/app/api/util.py", "/example/app/api/auth_models.py"], "/example/app/api/client_routes.py": ["/example/app/api/client_models.py", "/example/app/api/provider_models.py", "/example/app/api/common_models.py", "/example/app/api/blockchain/__init__.py", "/example/app/api/util.py"], "/example/app/api/blockchain/mongo.py": ["/example/app/api/blockchain/models.py"], "/example/app/dependencies.py": ["/example/app/api/blockchain/__init__.py"], "/example/app/api/__init__.py": ["/example/app/api/provider_routes.py", "/example/app/api/client_routes.py", "/example/app/api/auth_routes.py", "/example/app/api/blockchain/__init__.py"], "/example/app/api/blockchain/models.py": ["/example/app/api/util.py"], "/example/app/api/auth_models.py": ["/example/app/api/common_models.py", "/example/app/api/provider_models.py"], "/example/app/api/util.py": ["/example/app/api/common_models.py", "/example/app/api/provider_models.py", "/example/app/api/client_models.py"]}
|
20,264
|
monoper/BlockchainDB
|
refs/heads/main
|
/example/app/api/auth_routes.py
|
import os
import uuid
from pycognito import Cognito
from fastapi import APIRouter, Depends, status
from .blockchain import BlockchainDb
from .client_models import Client
from .provider_models import Provider
from .util import verify_auth_header
from .auth_models import RegisterClient, RegisterProvider, SignIn, ConfirmSignUp, \
ChangePassword, ConfirmForgotPassword, ForgotPassword, Token, \
User, SignInResponse
# Router for authentication endpoints under /api/auth; all routes share a
# blockchain-backed database dependency.
api = APIRouter(
    prefix="/api/auth",
    tags=["authentication"],
    dependencies=[Depends(BlockchainDb)],
    responses={404: {"description": "Not found"}},
)
@api.get('/verify-token',
         status_code=status.HTTP_200_OK,
         dependencies=[Depends(verify_auth_header)])
def verify_token():
    """No-op body: the verify_auth_header dependency rejects invalid tokens,
    so reaching this handler means the token is valid (HTTP 200)."""
    pass
@api.post('/register-client', response_model=str, status_code=status.HTTP_200_OK)
def register_client(client: RegisterClient, database: BlockchainDb = Depends()):
    """Register a client in Cognito and record the new account on the chain.

    Returns the Cognito user id (UserSub) of the new account.
    """
    aws_cognito = Cognito(os.environ['USER_POOL_ID'], os.environ['USER_POOL_WEB_CLIENT_ID'])
    aws_cognito.username = client.username
    aws_cognito.set_base_attributes(email=client.username, name=f'{client.name.firstName}')
    aws_cognito.add_custom_attributes(usertype='client')
    response = aws_cognito.register(client.username, client.password)
    # str(...) for consistency: the rest of the API stores generated ids as
    # strings (the original assigned a raw UUID object here).
    client.address.addressId = str(uuid.uuid4())
    database.commit_transaction(Client(clientId=response['UserSub'],
                                       name=client.name,
                                       phoneNumbers=client.phoneNumbers,
                                       address=client.address,
                                       dateOfBirth=client.dateOfBirth,
                                       email=client.email).dict(),
                                'CREATE', 'Client', 'clientId', response['UserSub'])
    return response['UserSub']
@api.post('/register-provider', response_model=str, status_code=status.HTTP_200_OK)
def register_provider(provider: RegisterProvider, database: BlockchainDb = Depends()):
    """Register a provider in Cognito and record the new account on the chain.

    Returns the Cognito user id (UserSub). If the blockchain commit fails,
    the Cognito account is rolled back (deleted) and 400 is returned.
    """
    aws_cognito = Cognito(os.environ['USER_POOL_ID'], os.environ['USER_POOL_WEB_CLIENT_ID'])
    aws_cognito.username = provider.username
    aws_cognito.set_base_attributes(email=provider.username, name=f'{provider.name.firstName}')
    aws_cognito.add_custom_attributes(usertype='provider')
    response = aws_cognito.register(provider.username, provider.password)
    # str(...) for consistency with the rest of the API, which stores
    # generated ids as strings (the original assigned raw UUID objects).
    for providable_treatment in provider.providableTreatments:
        providable_treatment.providableTreatmentId = str(uuid.uuid4())
    for address in provider.addresses:
        address.addressId = str(uuid.uuid4())
    try:
        database.commit_transaction(Provider(providerId=response['UserSub'],
                                             name=provider.name,
                                             phoneNumbers=provider.phoneNumbers,
                                             addresses=provider.addresses,
                                             dateOfBirth=provider.dateOfBirth,
                                             email=provider.email,
                                             providableTreatments=provider.providableTreatments).dict(),
                                    'CREATE', 'Provider', 'providerId', response['UserSub'])
        return response['UserSub']
    # Was a bare `except:` -- keep the rollback semantics, but don't trap
    # SystemExit/KeyboardInterrupt.
    except Exception:
        aws_cognito.delete_user()
        return status.HTTP_400_BAD_REQUEST
@api.post("/sign-in", response_model=SignInResponse, status_code=status.HTTP_200_OK)
def sign_in(user_sign_in: SignIn):
aws_cognito = Cognito(os.environ['USER_POOL_ID'], os.environ['USER_POOL_WEB_CLIENT_ID'])
aws_cognito.username = user_sign_in.username
aws_cognito.authenticate(password=user_sign_in.password)
user = aws_cognito.get_user(attr_map={"usertype": "custom:usertype","user_id":"sub"})
usertype = user._data["custom:usertype"]
user_id = user.sub
resp = {"user": User(userId=user_id, username=user.username, usertype=usertype), "token":Token(**aws_cognito.__dict__)}
return SignInResponse(**resp)
@api.post("/confirm-registration", status_code=status.HTTP_200_OK)
def confirm_registration(confirm_sign_up: ConfirmSignUp):
aws_cognito = Cognito(os.environ['USER_POOL_ID'], os.environ['USER_POOL_WEB_CLIENT_ID'])
aws_cognito.confirm_sign_up(confirm_sign_up.verificationCode, username=confirm_sign_up.username)
@api.post("/sign-out", status_code=status.HTTP_200_OK)
def sign_out(token: str = Depends(verify_auth_header)):
aws_cognito = Cognito(os.environ['USER_POOL_ID'],
os.environ['USER_POOL_WEB_CLIENT_ID'],
access_token=token)
aws_cognito.logout()
@api.post("/change-password", status_code=status.HTTP_200_OK)
def change_password(user_change_password: ChangePassword, token: str = Depends(verify_auth_header)):
aws_cognito = Cognito(os.environ['USER_POOL_ID'],
os.environ['USER_POOL_WEB_CLIENT_ID'],
access_token=token)
aws_cognito.change_password(user_change_password.old_password, user_change_password.new_password)
@api.post("/forgot-password", status_code=status.HTTP_200_OK)
def forgot_password(user_forgot_password: ForgotPassword):
aws_cognito = Cognito(os.environ['USER_POOL_ID'],
os.environ['USER_POOL_WEB_CLIENT_ID'])
aws_cognito.username = user_forgot_password.username
aws_cognito.add_custom_attributes(email=user_forgot_password.username)
aws_cognito.initiate_forgot_password()
@api.post("/confirm-forgot-password", status_code=status.HTTP_200_OK)
def confirm_forgot_password(user_confirm_forgot_password: ConfirmForgotPassword):
aws_cognito = Cognito(os.environ['USER_POOL_ID'],
os.environ['USER_POOL_WEB_CLIENT_ID'])
aws_cognito.username = user_confirm_forgot_password.username
aws_cognito.add_custom_attributes(email=user_confirm_forgot_password.username)
aws_cognito.confirm_forgot_password(user_confirm_forgot_password.verification_code,
user_confirm_forgot_password.new_password)
|
{"/example/app/api/blockchain/__init__.py": ["/example/app/api/blockchain/api/__init__.py"], "/example/app/api/provider_models.py": ["/example/app/api/common_models.py"], "/example/app/main.py": ["/example/app/api/__init__.py"], "/example/app/api/client_models.py": ["/example/app/api/common_models.py"], "/src/blockchain/api/blockchain_routes.py": ["/src/blockchain/blockchain.py"], "/example/app/api/provider_routes.py": ["/example/app/api/provider_models.py", "/example/app/api/common_models.py", "/example/app/api/blockchain/__init__.py", "/example/app/api/util.py", "/example/app/api/client_models.py"], "/example/app/api/auth_routes.py": ["/example/app/api/blockchain/__init__.py", "/example/app/api/client_models.py", "/example/app/api/provider_models.py", "/example/app/api/util.py", "/example/app/api/auth_models.py"], "/example/app/api/client_routes.py": ["/example/app/api/client_models.py", "/example/app/api/provider_models.py", "/example/app/api/common_models.py", "/example/app/api/blockchain/__init__.py", "/example/app/api/util.py"], "/example/app/api/blockchain/mongo.py": ["/example/app/api/blockchain/models.py"], "/example/app/dependencies.py": ["/example/app/api/blockchain/__init__.py"], "/example/app/api/__init__.py": ["/example/app/api/provider_routes.py", "/example/app/api/client_routes.py", "/example/app/api/auth_routes.py", "/example/app/api/blockchain/__init__.py"], "/example/app/api/blockchain/models.py": ["/example/app/api/util.py"], "/example/app/api/auth_models.py": ["/example/app/api/common_models.py", "/example/app/api/provider_models.py"], "/example/app/api/util.py": ["/example/app/api/common_models.py", "/example/app/api/provider_models.py", "/example/app/api/client_models.py"]}
|
20,265
|
monoper/BlockchainDB
|
refs/heads/main
|
/example/test/aws_service_discovery/service_discovery.py
|
"""List the IPv4 address of every service instance registered in AWS Cloud Map."""
import boto3
import json

client = boto3.client('servicediscovery')

services = client.list_services()

for service in services['Services']:
    print(service)
    instances = client.list_instances(
        ServiceId=service['Id'],
        MaxResults=100
    )
    for instance in instances['Instances']:
        # Each registered instance carries its private IPv4 in this attribute.
        # (The original printed this line twice -- a copy-paste duplicate.)
        print(instance['Attributes']['AWS_INSTANCE_IPV4'])
|
{"/example/app/api/blockchain/__init__.py": ["/example/app/api/blockchain/api/__init__.py"], "/example/app/api/provider_models.py": ["/example/app/api/common_models.py"], "/example/app/main.py": ["/example/app/api/__init__.py"], "/example/app/api/client_models.py": ["/example/app/api/common_models.py"], "/src/blockchain/api/blockchain_routes.py": ["/src/blockchain/blockchain.py"], "/example/app/api/provider_routes.py": ["/example/app/api/provider_models.py", "/example/app/api/common_models.py", "/example/app/api/blockchain/__init__.py", "/example/app/api/util.py", "/example/app/api/client_models.py"], "/example/app/api/auth_routes.py": ["/example/app/api/blockchain/__init__.py", "/example/app/api/client_models.py", "/example/app/api/provider_models.py", "/example/app/api/util.py", "/example/app/api/auth_models.py"], "/example/app/api/client_routes.py": ["/example/app/api/client_models.py", "/example/app/api/provider_models.py", "/example/app/api/common_models.py", "/example/app/api/blockchain/__init__.py", "/example/app/api/util.py"], "/example/app/api/blockchain/mongo.py": ["/example/app/api/blockchain/models.py"], "/example/app/dependencies.py": ["/example/app/api/blockchain/__init__.py"], "/example/app/api/__init__.py": ["/example/app/api/provider_routes.py", "/example/app/api/client_routes.py", "/example/app/api/auth_routes.py", "/example/app/api/blockchain/__init__.py"], "/example/app/api/blockchain/models.py": ["/example/app/api/util.py"], "/example/app/api/auth_models.py": ["/example/app/api/common_models.py", "/example/app/api/provider_models.py"], "/example/app/api/util.py": ["/example/app/api/common_models.py", "/example/app/api/provider_models.py", "/example/app/api/client_models.py"]}
|
20,266
|
monoper/BlockchainDB
|
refs/heads/main
|
/example/app/api/client_routes.py
|
import uuid
from typing import List
from fastapi import Depends, APIRouter, status, HTTPException
from .client_models import Client, LinkedProvider
from .provider_models import Provider
from .common_models import Appointment, AppointmentStatus
from .blockchain import BlockchainDb
from .util import verify_auth_header
# Router for all client-facing endpoints. Every route resolves a
# BlockchainDb instance and requires a valid auth header before running.
api = APIRouter(
    prefix="/api/client",
    tags=["clients"],
    dependencies=[Depends(BlockchainDb),Depends(verify_auth_header)],
    responses={404: {"description": "Not found"}},
)
@api.get("/{client_id}", response_model=Client, status_code=status.HTTP_200_OK)
def get_client(client_id: str, database: BlockchainDb = Depends()):
result = database.find_one('Client', {'clientId': client_id})
if result is None:
raise HTTPException(status_code=404, detail='Client not found')
return Client(**result)
@api.put("/{client_id}", status_code=status.HTTP_200_OK)
def update_client(client_id: str, client: Client, database: BlockchainDb = Depends()):
if client.clientId != client_id:
raise HTTPException(status_code=400,
detail='Client id in query parameter doesn\'t match payload')
database.commit_transaction(client, 'EDIT', 'Client', 'clientId', client_id)
@api.get("/{client_id}/appointments",
response_model=List[Appointment],
status_code=status.HTTP_200_OK)
def get_client_appointments(client_id: str, database: BlockchainDb = Depends()):
result = database.find('Appointment', {'clientId': client_id})
if result is None:
return []
return result
@api.get("/{client_id}/appointments/{appointment_id}",
response_model=Appointment,
status_code=status.HTTP_200_OK)
def get_client_appointment(client_id: str, appointment_id: str, database: BlockchainDb = Depends()):
result = database.find_one('Appointment',
{'clientId': client_id, 'appointmentId': appointment_id})
if result is None:
raise HTTPException(status_code=404, detail='Appointment not found')
return result
@api.post("/{client_id}/appointments", status_code=status.HTTP_200_OK)
def add_client_appointment(client_id: str, appointment: Appointment,
database: BlockchainDb = Depends()):
if appointment.clientId != client_id:
raise HTTPException(status_code=400,
detail=f'Client id ({client_id}) in query \
parameter doesn\'t match payload \
({appointment.clientId}) \
{client_id == appointment.clientId}')
#need to add protect so that only 1 create block can exist for a given ID
appointment.appointmentId = str(uuid.uuid4())
provider = Provider(**database.find_one('Provider', {'providerId': appointment.providerId}))
client = Client(**database.find_one('Client', {'clientId': client_id}))
if not any(linked_provider.providerId == provider.providerId
for linked_provider in client.linkedProviders):
client.linkedProviders.append(LinkedProvider(providerId=provider.providerId,
hasAccess=True,
providerName=f'{provider.name.firstName} {provider.name.lastName}'))
database.commit_transaction(client, 'EDIT', 'Client', 'clientId', client_id)
database.commit_transaction(appointment, 'CREATE', 'Appointment',
'appointmentId', appointment.appointmentId)
@api.post("/{client_id}/linked-provider/{provider_id}/toggle", status_code=status.HTTP_200_OK)
def toggle_client_linked_provider(client_id: str, provider_id: str,
database: BlockchainDb = Depends()):
client = Client(**database.find_one('Client', {'clientId': client_id}))
for index, linked_provider in enumerate(client.linkedProviders):
if linked_provider.providerId == provider_id:
linked_provider.hasAccess = not linked_provider.hasAccess
client.linkedProviders[index] = linked_provider
database.commit_transaction(client, 'EDIT', 'Client', 'clientId', client_id)
@api.put("/{client_id}/appointments/{appointment_id}", status_code=status.HTTP_200_OK)
def update_client_appointment(client_id: str, appointment_id: str,
appointment: Appointment, database: BlockchainDb = Depends()):
if appointment.clientId != client_id or appointment.appointmentId != appointment_id:
raise HTTPException(status_code=400,
detail='Client id in query parameter doesn\'t match payload')
if appointment.status == AppointmentStatus.Completed \
or appointment.status == AppointmentStatus.Rejected:
raise HTTPException(status_code=400,
detail='Cannot update a completed or rejected appointment')
result = database.commit_transaction(appointment, 'EDIT',
'Appointment', 'appointmentId', appointment_id)
if result is None:
raise HTTPException(status_code=400, detail='Could not update appointment')
return result
@api.get("/{client_id}/prescribed-treatments", status_code=status.HTTP_200_OK)
def get_client_prescribed_treatments(client_id: str, database: BlockchainDb = Depends()):
appointments = database.find('Appointment', { 'clientId' : client_id})
if appointments is None:
return []
prescribed_treatments = []
[prescribed_treatments.extend(appointment.prescribedTreatment) for appointment in appointments]
return prescribed_treatments
|
{"/example/app/api/blockchain/__init__.py": ["/example/app/api/blockchain/api/__init__.py"], "/example/app/api/provider_models.py": ["/example/app/api/common_models.py"], "/example/app/main.py": ["/example/app/api/__init__.py"], "/example/app/api/client_models.py": ["/example/app/api/common_models.py"], "/src/blockchain/api/blockchain_routes.py": ["/src/blockchain/blockchain.py"], "/example/app/api/provider_routes.py": ["/example/app/api/provider_models.py", "/example/app/api/common_models.py", "/example/app/api/blockchain/__init__.py", "/example/app/api/util.py", "/example/app/api/client_models.py"], "/example/app/api/auth_routes.py": ["/example/app/api/blockchain/__init__.py", "/example/app/api/client_models.py", "/example/app/api/provider_models.py", "/example/app/api/util.py", "/example/app/api/auth_models.py"], "/example/app/api/client_routes.py": ["/example/app/api/client_models.py", "/example/app/api/provider_models.py", "/example/app/api/common_models.py", "/example/app/api/blockchain/__init__.py", "/example/app/api/util.py"], "/example/app/api/blockchain/mongo.py": ["/example/app/api/blockchain/models.py"], "/example/app/dependencies.py": ["/example/app/api/blockchain/__init__.py"], "/example/app/api/__init__.py": ["/example/app/api/provider_routes.py", "/example/app/api/client_routes.py", "/example/app/api/auth_routes.py", "/example/app/api/blockchain/__init__.py"], "/example/app/api/blockchain/models.py": ["/example/app/api/util.py"], "/example/app/api/auth_models.py": ["/example/app/api/common_models.py", "/example/app/api/provider_models.py"], "/example/app/api/util.py": ["/example/app/api/common_models.py", "/example/app/api/provider_models.py", "/example/app/api/client_models.py"]}
|
20,267
|
monoper/BlockchainDB
|
refs/heads/main
|
/example/app/api/common_models.py
|
"""Models that are shared between providers, clients and appointments"""
from typing import List
import datetime
from enum import Enum
from pydantic import BaseModel, validator
class ProvidableTreatment(BaseModel):
    """Model for Providable Treatment"""
    # Identifier assigned when the treatment is persisted; defaults to
    # empty for not-yet-saved payloads.
    providableTreatmentId: str = str('')
    name: str
    description: str
class AppointmentStatus(Enum):
    """Enum model for an appointment status (lifecycle of an appointment)."""
    Pending = 0      # initial state for newly created appointments
    Accepted = 1
    Rejected = 2     # terminal: cannot be updated further
    Completed = 3    # terminal: cannot be updated further
    InProgress = 4
class Provinces(Enum):
    """Enum model for a Canadian province or territory.

    The integer values are arbitrary ordinals used for serialization.
    """
    Ontario = 0
    Manitoba = 1
    Quebec = 2
    Newfoundland = 3
    Saskatchewan = 4
    PrinceEdwardIsland = 5
    BritishColumbia = 6
    NovaScotia = 7
    Yukon = 8
    NorthwestTerritories = 9
    Nunavut = 10
    NewBrunswick = 11
class Name(BaseModel):
    """Model for a person's name. All three parts are required."""
    firstName: str
    middleName: str
    lastName: str
class PhoneNumbers(BaseModel):
    """Model for a person's phone numbers."""
    mobile: str
    home: str
    work: str
class Address(BaseModel):
    """Model for a Canadian postal address.

    Only Canada is accepted as a country; any other value is rejected
    by the ``country`` validator.
    """
    # Identifier assigned when the address is persisted; empty until then.
    addressId: str = ''
    unit: str
    streetAddress: str
    city: str
    province: Provinces
    country: str
    postalCode: str

    @validator('country')
    def country_must_be_canada(cls, value):
        # Case-insensitive check; the stored value keeps the caller's casing.
        if value.lower() == 'canada':
            return value
        raise ValueError("Only Canada is supported as a country.")
class PrescribedTreatment(ProvidableTreatment):
    """A providable treatment that has been prescribed, with its schedule."""
    treatmentFrequency: str
    startDate: datetime.datetime
    endDate: datetime.datetime
class Notes(BaseModel):
    """A timestamped free-text note attached to an appointment."""
    # Identifier assigned when the note is persisted; empty until then.
    noteId: str = str('')
    createdDate: datetime.datetime
    note: str
class Appointment(BaseModel):
    """Model for Appointment"""
    # Assigned server-side (str(uuid4)) when the appointment is created.
    appointmentId: str = str('')
    clientId: str
    providerId: str
    reasonForAppointment: str
    address: Address
    date: datetime.datetime
    # New appointments start out Pending.
    status: AppointmentStatus = AppointmentStatus.Pending
    attended: bool
    cancellationReason: str = str('')
    requestedTreatments: List[ProvidableTreatment]
    prescribedTreatments: List[PrescribedTreatment] = []
    notes: List[Notes] = []
|
{"/example/app/api/blockchain/__init__.py": ["/example/app/api/blockchain/api/__init__.py"], "/example/app/api/provider_models.py": ["/example/app/api/common_models.py"], "/example/app/main.py": ["/example/app/api/__init__.py"], "/example/app/api/client_models.py": ["/example/app/api/common_models.py"], "/src/blockchain/api/blockchain_routes.py": ["/src/blockchain/blockchain.py"], "/example/app/api/provider_routes.py": ["/example/app/api/provider_models.py", "/example/app/api/common_models.py", "/example/app/api/blockchain/__init__.py", "/example/app/api/util.py", "/example/app/api/client_models.py"], "/example/app/api/auth_routes.py": ["/example/app/api/blockchain/__init__.py", "/example/app/api/client_models.py", "/example/app/api/provider_models.py", "/example/app/api/util.py", "/example/app/api/auth_models.py"], "/example/app/api/client_routes.py": ["/example/app/api/client_models.py", "/example/app/api/provider_models.py", "/example/app/api/common_models.py", "/example/app/api/blockchain/__init__.py", "/example/app/api/util.py"], "/example/app/api/blockchain/mongo.py": ["/example/app/api/blockchain/models.py"], "/example/app/dependencies.py": ["/example/app/api/blockchain/__init__.py"], "/example/app/api/__init__.py": ["/example/app/api/provider_routes.py", "/example/app/api/client_routes.py", "/example/app/api/auth_routes.py", "/example/app/api/blockchain/__init__.py"], "/example/app/api/blockchain/models.py": ["/example/app/api/util.py"], "/example/app/api/auth_models.py": ["/example/app/api/common_models.py", "/example/app/api/provider_models.py"], "/example/app/api/util.py": ["/example/app/api/common_models.py", "/example/app/api/provider_models.py", "/example/app/api/client_models.py"]}
|
20,268
|
monoper/BlockchainDB
|
refs/heads/main
|
/example/app/api/blockchain/mongo.py
|
"""Class to handle mongodb database"""
import os
import logging
from pymongo import MongoClient
from .models import Block, generate_audit_block
class CreateBlockAlreadyExistsError(Exception):
    """Raised when a CREATE block targets a key that already has a CREATE block."""

    def __init__(self, data_key_field_name, data_key_value):
        # Single-line message (the original's backslash continuation embedded
        # the source indentation inside the string).
        self.message = (f'Block of type CREATE cannot be created. '
                        f'Key: {data_key_field_name} and Id: {data_key_value} already exists')
        # BUG FIX: pass the message to Exception so str(exc) and tracebacks
        # show it -- the original never initialised the base class, so
        # str(exc) was empty.
        super().__init__(self.message)
class MongoDb:
    """
    Wrapper for mongodb and performs some basic operations on the database
    """
    def __init__(self):
        # Both settings are mandatory; fail fast at construction time.
        if 'CONNECTION_STRING' in os.environ:
            self.connection_string = os.environ['CONNECTION_STRING']
        else:
            raise ValueError('CONNECTION_STRING is required as an environment variable')
        if 'DATABASE' in os.environ:
            self.database_name = os.environ['DATABASE']
        else:
            raise ValueError('DATABASE is required as an environment variable')
    def get_latest_hash(self):
        """Return the hash of the most recently inserted block, or '' for an empty chain."""
        latest_block = self.__get_database().Blocks.find_one({}, sort=[('_id', -1)])
        if latest_block is None:
            return ''
        return latest_block['hash']
    def commit_block(self, block: Block):
        """Append a block to Blocks and its data document to the block's collection.

        The very first block is written as-is (genesis). Otherwise: raises
        CreateBlockAlreadyExistsError if a CREATE block already exists for the
        same key, marks previous documents for that key as superceded, then
        inserts the new block and its data document.
        """
        database = self.__get_database()
        naked_block = block.get_naked_block()
        # NOTE(review): Collection.count() and Collection.update(..., multi=True)
        # are deprecated/removed in newer pymongo (count_documents /
        # update_many) -- confirm against the pinned pymongo version.
        if database.Blocks.count() == 0:
            logging.info("Genisys block created")
            database.Blocks.insert_one(vars(naked_block))
            return
        data_block = block.get_data_block()
        data_key_value = str(block.data_key_value)
        existing_block_query = {block.data_key_field_name: data_key_value, "block_type": 'CREATE'}
        existing_collection_block_result = list(self.__get_database()[data_block.collection]
                                                .find(filter=existing_block_query))
        if len(existing_collection_block_result) > 0 and block.block_type == 'CREATE':
            raise CreateBlockAlreadyExistsError(block.data_key_field_name, data_key_value)
        # Supersede every older document for this key before inserting.
        existing_block_query_updated = {"$set": {"superceded": True}}
        database[data_block.collection].update({block.data_key_field_name: data_key_value},
                                               existing_block_query_updated, multi=True)
        database.Blocks.insert_one(vars(naked_block))
        database[data_block.collection].insert_one(data_block.get_document())
    def get_block_count(self):
        """Return the total number of blocks in the chain."""
        database = self.__get_database()
        return database.Blocks.count()
    def __get_database(self):
        # A new MongoClient per call; MongoClient maintains its own pool.
        client = MongoClient(self.connection_string)
        return client[self.database_name]
    def __find_base(self, collection_name, query):
        # Shared find: only non-superceded documents, block_type projected away.
        # Note: mutates the caller's query dict by adding 'superceded'.
        database = self.__get_database()
        query['superceded'] = False
        return database[collection_name].find(filter=query, projection={'block_type': 0})
    def find_one(self, collection_name, query):
        """Return the newest matching document after audit, or None."""
        result = self.__find_base(collection_name, query)
        sorted_result = list(result.sort([("_id", -1)]).limit(1))
        if len(sorted_result) == 0:
            return None
        result = sorted_result[0]
        del result["_id"]
        del result["superceded"]
        return self.audit_result(result)
    def find(self, collection_name, query):
        """Return matching documents, newest first, filtered to those that audit cleanly."""
        results = list(self.__find_base(collection_name, query).sort([("_id", -1)]))
        for result in results:
            del result["_id"]
            del result["superceded"]
        return self.audit_results(results)
    def audit_result(self, query_result):
        """Recompute the hash for one document; return it on a match, else None."""
        database = self.__get_database()
        block = database.Blocks.find_one(filter={"hash": query_result['hash_id']})
        proposed_hash = generate_audit_block(block['id'], query_result, block['block_type'],
                                             block['timestamp'], block['previous_hash'])
        if proposed_hash.hash == block['hash']:
            return query_result
        return None
    def audit_results(self, query_results):
        """Recompute hashes for many documents; keep only those that verify."""
        database = self.__get_database()
        results = []
        for result in query_results:
            block = database.Blocks.find_one(filter={"hash": result['hash_id']})
            proposed_hash = generate_audit_block(block['id'], result, block['block_type'],
                                                 block['timestamp'], block['previous_hash']).hash
            if proposed_hash == block['hash']:
                results.append(result)
        return results
    def get_blockchain_hash_links(self):
        """Map every block hash to its previous hash, newest first."""
        block_hash_links = self.__get_database().Blocks.find(sort=[("_id", -1)],
                                                             projection={'hash': 1, 'previous_hash': 1, '_id': 0})
        return {elem['hash']: elem['previous_hash'] for elem in list(block_hash_links)}
|
{"/example/app/api/blockchain/__init__.py": ["/example/app/api/blockchain/api/__init__.py"], "/example/app/api/provider_models.py": ["/example/app/api/common_models.py"], "/example/app/main.py": ["/example/app/api/__init__.py"], "/example/app/api/client_models.py": ["/example/app/api/common_models.py"], "/src/blockchain/api/blockchain_routes.py": ["/src/blockchain/blockchain.py"], "/example/app/api/provider_routes.py": ["/example/app/api/provider_models.py", "/example/app/api/common_models.py", "/example/app/api/blockchain/__init__.py", "/example/app/api/util.py", "/example/app/api/client_models.py"], "/example/app/api/auth_routes.py": ["/example/app/api/blockchain/__init__.py", "/example/app/api/client_models.py", "/example/app/api/provider_models.py", "/example/app/api/util.py", "/example/app/api/auth_models.py"], "/example/app/api/client_routes.py": ["/example/app/api/client_models.py", "/example/app/api/provider_models.py", "/example/app/api/common_models.py", "/example/app/api/blockchain/__init__.py", "/example/app/api/util.py"], "/example/app/api/blockchain/mongo.py": ["/example/app/api/blockchain/models.py"], "/example/app/dependencies.py": ["/example/app/api/blockchain/__init__.py"], "/example/app/api/__init__.py": ["/example/app/api/provider_routes.py", "/example/app/api/client_routes.py", "/example/app/api/auth_routes.py", "/example/app/api/blockchain/__init__.py"], "/example/app/api/blockchain/models.py": ["/example/app/api/util.py"], "/example/app/api/auth_models.py": ["/example/app/api/common_models.py", "/example/app/api/provider_models.py"], "/example/app/api/util.py": ["/example/app/api/common_models.py", "/example/app/api/provider_models.py", "/example/app/api/client_models.py"]}
|
20,269
|
monoper/BlockchainDB
|
refs/heads/main
|
/src/blockchain/blockchain.py
|
"""Implementation of the actual blockchain"""
import asyncio
import json
import os
from datetime import datetime, timezone
import time
import logging
import requests
import boto3
from injector import inject
from .mongo import MongoDb
from .models import Block, ProposedBlock, generate_block, generate_from_proposed_block
class Blockchain:
    """Primary class to control the blockchain.

    Wraps the MongoDb block store and coordinates block creation,
    peer-node validation and whole-chain auditing.
    """
    @inject
    def __init__(self):
        self.database = MongoDb()
        self.nodes = []
        # Local/development deployments read peer nodes from the NODES env
        # var; any other environment discovers peers via AWS Cloud Map.
        if 'ENVIRONMENT' not in os.environ or os.environ['ENVIRONMENT'] == 'local' \
                or os.environ['ENVIRONMENT'] == 'development':
            if 'NODES' in os.environ and len(os.environ['NODES']) > 0:
                self.nodes = json.loads(os.environ['NODES'])
        else:
            self.nodes = get_aws_nodes()
        logging.info(f'Using nodes: {self.nodes}')
        count = self.database.get_block_count()
        if count == 0:
            self.__create_genesis_block()
    def __create_genesis_block(self):
        """Creates the genesys block for the chain. This should only be called once"""
        self.__commit(Block([], 'GENISYS', '', '', '', '', '', ''))
    def commit_transaction(self, transaction, block_type, data_collection_name,
                           data_key_field_name, data_key_value):
        """Handles the commit for any transaction either create or edit.

        Generates a candidate block, asks peer nodes to validate its hash and
        commits it on success. Retries up to three times with a growing
        backoff. Returns True when committed, False when all attempts fail.
        """
        retry_count = 3
        count = 0
        while count < retry_count:
            new_block = generate_block(transaction, block_type,
                                       datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S %z"),
                                       self.last_block, data_collection_name, data_key_field_name,
                                       data_key_value)
            logging.info(f'New block created with hash: {new_block.hash}')
            is_valid = self.validate_block(new_block)
            if is_valid:
                self.__commit(new_block)
                return True
            logging.info('Not enough successful results for block. Block rejected.')
            count += 1
            time.sleep(0.1 * count)
        return False
    def __commit(self, block: Block):
        """
        Starts the process to add a block to the blockchain
        """
        self.database.commit_block(block)
        return block
    def get_proposed_block_hash(self, proposed_block: ProposedBlock):
        """
        Generates a block that is potentially to be added to the blockchain
        """
        logging.debug(f'proposed: {proposed_block}')
        block = generate_from_proposed_block(proposed_block, self.last_block)
        logging.debug(block)
        return block.hash
    def get_new_block_hash(self, transaction, block_type, timestamp, data_collection_name,
                           data_key_field_name, data_key_value):
        """
        Generates a candidate block and calculates its hash
        """
        logging.info(f'Previous hash: {self.last_block}')
        new_block = generate_block(transaction, block_type, timestamp,
                                   self.last_block, data_collection_name,
                                   data_key_field_name, data_key_value)
        logging.info(f'New block: {transaction}, {block_type}, {timestamp},\
            {data_collection_name}, {data_key_field_name}, {data_key_value}')
        return new_block.hash
    def validate_block(self, block: Block):
        """
        Dispatchs blocks for comparison against other nodes and determines
        the results. More than 75% of peers must agree on the hash.
        """
        if len(self.nodes) == 0:
            # No peers configured: trust our own hash.
            return True
        proposed_block = ProposedBlock(**vars(block))
        logging.info('Starting node conferral process')
        results = asyncio.run(self.validate_with_other_nodes(proposed_block))
        logging.info(f'Node conferral results: {results}')
        successful_nodes = []
        for result in results:
            logging.info(f'status code: {result.status_code} hash: {result.text}')
            logging.debug(f'Current hash: {block.hash} Conferral Node hash: {result.text}')
            # Peers return the hash JSON-encoded, hence the surrounding quotes.
            if result.status_code == 200 and result.text == f'"{block.hash}"':
                logging.debug('Adding successful validated node')
                successful_nodes.append(result)
        logging.debug(f'Successful Nodes: {len(successful_nodes)}')
        logging.debug(f'All results: {len(results)}')
        logging.debug(f'Rate of success: {len(successful_nodes) / len(results)}')
        return (len(successful_nodes) / len(results)) > 0.75
    async def validate_with_other_nodes(self, proposed_block):
        """
        Handles the coallation of the block validation requests
        """
        logging.debug(f'Using nodes: {self.nodes}')
        outstanding_requests_tasks = [self.validate_with_other_node_request(node, proposed_block)
                                      for node in self.nodes]
        if len(outstanding_requests_tasks) == 0:
            return []
        return await asyncio.gather(*outstanding_requests_tasks)
    async def validate_with_other_node_request(self, node, proposed_block):
        """
        Dispatchs the proposed block for other nodes to confirm the hash is valid
        """
        logging.info(f'Attempting to confirm with node at address: \
            {node}/api/blockchain/validate-block and payload: {proposed_block.json()}')
        return requests.post(f'{node}/api/blockchain/validate-block', data=proposed_block.json())
    def validate(self):
        """
        Validates the blockchain itself to ensure that all nodes are accounted for and in order
        based upon the links from one block to the next. Similar to traversal of a linked list.

        Returns True when every block is reachable by following previous-hash
        links back to the genesis block ('' predecessor), False otherwise.
        """
        hash_links = self.database.get_blockchain_hash_links()
        if len(hash_links) == 0:
            return True
        list_keys = list(hash_links.keys())
        next_key = hash_links.pop(list_keys[0])
        while next_key != '':
            # BUG FIX: the original popped without checking membership, so a
            # broken link raised an uncaught KeyError instead of returning False.
            if next_key not in hash_links:
                logging.error(f'Blockchain failed to validate at: {next_key}')
                return False
            next_key = hash_links.pop(next_key)
        if len(hash_links) > 0:
            # Blocks remain that were never reached by following the links.
            logging.error(f'Blockchain failed to validate: {len(hash_links)} unlinked blocks')
            return False
        # BUG FIX: the original logged "failed to validate" on success and
        # called datetime.timestamp() on the class (TypeError at runtime).
        logging.info(f'Blockchain validated successfully at: {datetime.now(timezone.utc)}')
        return True
    def find_one(self, collection_name, query):
        """
        Wrapper to call to find a single node and its real value in the database
        """
        return self.database.find_one(collection_name, query)
    def find(self, collection_name, query):
        """
        Wrapper to call to find a multiple nodes and their real values in the database
        """
        return self.database.find(collection_name, query)
    @property
    def last_block(self):
        """Hash of the most recent block ('' when the chain is empty)."""
        return self.database.get_latest_hash()
def get_aws_nodes():
    """Discover peer-node IPv4 addresses via AWS Cloud Map, excluding this
    container's own address (read from the ECS metadata endpoint)."""
    discovery = boto3.client('servicediscovery')
    metadata_uri = os.environ['ECS_CONTAINER_METADATA_URI']
    container_metadata = requests.get(metadata_uri).json()
    own_ip = container_metadata['Networks'][0]['IPv4Addresses'][0]
    node_ips = []
    for service in discovery.list_services()['Services']:
        instances = discovery.list_instances(
            ServiceId=service['Id'],
            MaxResults=100
        )
        for instance in instances['Instances']:
            candidate = instance['Attributes']['AWS_INSTANCE_IPV4']
            if candidate != own_ip:
                node_ips.append(candidate)
    return node_ips
|
{"/example/app/api/blockchain/__init__.py": ["/example/app/api/blockchain/api/__init__.py"], "/example/app/api/provider_models.py": ["/example/app/api/common_models.py"], "/example/app/main.py": ["/example/app/api/__init__.py"], "/example/app/api/client_models.py": ["/example/app/api/common_models.py"], "/src/blockchain/api/blockchain_routes.py": ["/src/blockchain/blockchain.py"], "/example/app/api/provider_routes.py": ["/example/app/api/provider_models.py", "/example/app/api/common_models.py", "/example/app/api/blockchain/__init__.py", "/example/app/api/util.py", "/example/app/api/client_models.py"], "/example/app/api/auth_routes.py": ["/example/app/api/blockchain/__init__.py", "/example/app/api/client_models.py", "/example/app/api/provider_models.py", "/example/app/api/util.py", "/example/app/api/auth_models.py"], "/example/app/api/client_routes.py": ["/example/app/api/client_models.py", "/example/app/api/provider_models.py", "/example/app/api/common_models.py", "/example/app/api/blockchain/__init__.py", "/example/app/api/util.py"], "/example/app/api/blockchain/mongo.py": ["/example/app/api/blockchain/models.py"], "/example/app/dependencies.py": ["/example/app/api/blockchain/__init__.py"], "/example/app/api/__init__.py": ["/example/app/api/provider_routes.py", "/example/app/api/client_routes.py", "/example/app/api/auth_routes.py", "/example/app/api/blockchain/__init__.py"], "/example/app/api/blockchain/models.py": ["/example/app/api/util.py"], "/example/app/api/auth_models.py": ["/example/app/api/common_models.py", "/example/app/api/provider_models.py"], "/example/app/api/util.py": ["/example/app/api/common_models.py", "/example/app/api/provider_models.py", "/example/app/api/client_models.py"]}
|
20,270
|
monoper/BlockchainDB
|
refs/heads/main
|
/example/app/dependencies.py
|
"""Dependency injection configuration"""
from injector import singleton
from .api.blockchain import BlockchainDb
def configure_dependencies(binder):
    """Service configurations.

    Registers BlockchainDb with the injector as a singleton so every
    injection site shares the same database instance.
    """
    binder.bind(BlockchainDb, to=BlockchainDb, scope=singleton)
|
{"/example/app/api/blockchain/__init__.py": ["/example/app/api/blockchain/api/__init__.py"], "/example/app/api/provider_models.py": ["/example/app/api/common_models.py"], "/example/app/main.py": ["/example/app/api/__init__.py"], "/example/app/api/client_models.py": ["/example/app/api/common_models.py"], "/src/blockchain/api/blockchain_routes.py": ["/src/blockchain/blockchain.py"], "/example/app/api/provider_routes.py": ["/example/app/api/provider_models.py", "/example/app/api/common_models.py", "/example/app/api/blockchain/__init__.py", "/example/app/api/util.py", "/example/app/api/client_models.py"], "/example/app/api/auth_routes.py": ["/example/app/api/blockchain/__init__.py", "/example/app/api/client_models.py", "/example/app/api/provider_models.py", "/example/app/api/util.py", "/example/app/api/auth_models.py"], "/example/app/api/client_routes.py": ["/example/app/api/client_models.py", "/example/app/api/provider_models.py", "/example/app/api/common_models.py", "/example/app/api/blockchain/__init__.py", "/example/app/api/util.py"], "/example/app/api/blockchain/mongo.py": ["/example/app/api/blockchain/models.py"], "/example/app/dependencies.py": ["/example/app/api/blockchain/__init__.py"], "/example/app/api/__init__.py": ["/example/app/api/provider_routes.py", "/example/app/api/client_routes.py", "/example/app/api/auth_routes.py", "/example/app/api/blockchain/__init__.py"], "/example/app/api/blockchain/models.py": ["/example/app/api/util.py"], "/example/app/api/auth_models.py": ["/example/app/api/common_models.py", "/example/app/api/provider_models.py"], "/example/app/api/util.py": ["/example/app/api/common_models.py", "/example/app/api/provider_models.py", "/example/app/api/client_models.py"]}
|
20,271
|
monoper/BlockchainDB
|
refs/heads/main
|
/example/test/load_testing/provider_mass_adder.py
|
from pydantic import BaseModel, ValidationError, EmailStr, validator
from typing import List
from datetime import datetime
import random
import asyncio
import json
import requests
import logging
import uuid
from enum import Enum
class Provinces(Enum):
    """Canadian provinces and territories.

    The integer values are part of the API's wire format (see HelperEncoder,
    which serialises members as their value) — do not renumber.
    """
    Ontario = 0
    Manitoba = 1
    Quebec = 2
    Newfoundland = 3
    Saskatchewan = 4
    PrinceEdwardIsland = 5
    BritishColumbia = 6
    NovaScotia = 7
    Yukon = 8
    NorthwestTerritories = 9
    Nunavut = 10
    NewBrunswick = 11
class Name(BaseModel):
    """A person's full name (all three parts required)."""
    firstName: str
    middleName: str
    lastName: str
class PhoneNumbers(BaseModel):
    """Mobile, home and work phone numbers as free-form strings."""
    mobile: str
    home: str
    work: str
class ProvidableTreatment(BaseModel):
    """A treatment a provider can offer, identified by its name."""
    name: str
    description: str
class Address(BaseModel):
    """A Canadian mailing address; only country 'canada' is accepted."""
    unit: str
    streetAddress: str
    city: str
    province: Provinces
    country: str
    postalCode: str

    @validator('country')
    def country_must_be_canada(cls, v):
        # Case-insensitive comparison; any non-Canadian address is rejected.
        if v.lower() != 'canada':
            raise ValueError("Only Canada is supported as a country.")
        return v
class Appointment(BaseModel):
    """An appointment between a client and a provider.

    `appointmentId` defaults to a fresh UUID4 per instance. The previous
    class-level default `str(uuid.uuid4())` was evaluated once at class
    definition time, so every Appointment created without an explicit id
    shared the same identifier; the always-run validator below generates
    a new one per instance instead.
    """
    appointmentId: str = None
    clientId: str
    providerId: str
    reasonForAppointment: str
    address: Address
    date: datetime
    status: int
    attended: bool
    cancellationReason: str

    @validator('appointmentId', pre=True, always=True)
    def _default_appointment_id(cls, v):
        # Keep a caller-supplied id; otherwise mint a unique one.
        return v or str(uuid.uuid4())
class RegisterProvider(BaseModel):
    """Payload for the provider-registration endpoint.

    Mirrors the server-side RegisterProvider model in auth_models.py.
    """
    username: str
    password: str
    name: Name
    phoneNumbers: PhoneNumbers
    addresses: List[Address]
    dateOfBirth: datetime
    email: EmailStr
    providableTreatments: List[ProvidableTreatment]
class HelperEncoder(json.JSONEncoder):
    """JSON encoder that serialises UUIDs, datetimes and Provinces members."""

    def default(self, o):
        # UUIDs become their canonical string form.
        if isinstance(o, uuid.UUID):
            return str(o)
        # Datetimes become ISO-8601 strings.
        if isinstance(o, datetime):
            return o.isoformat()
        # Province enum members serialise as their integer wire value.
        if isinstance(o, Provinces):
            return o.value
        # Anything else: defer to the base class (raises TypeError).
        return super().default(o)
def build_provider():
name = get_name()
number_of_addresses = random.randint(1, 5)
number_of_providable_treatments = random.randint(1, 7)
address = []
providableTreatments = []
for i in range(0, random.randint(1, 7)):
address.append(get_address())
for i in range(0, random.randint(1, 7)):
providable_treatment = get_providable_treatments()
treatment_exists = False
for providableTreatment in providableTreatments:
if providableTreatment.name == providable_treatment.name:
treatment_exists = True
if not treatment_exists:
providableTreatments.append(providable_treatment)
phone_numbers = get_phone_numbers()
email = get_email(name)
return RegisterProvider(**{
'username': email,
'password': 'Password!@3',
'name': name,
'phoneNumbers': phone_numbers,
'addresses': address,
'dateOfBirth': datetime(1950 + random.randint(0, 40), random.randint(1, 12), random.randint(1, 28)),
'providableTreatments':providableTreatments,
'email':email
})
def get_phone_numbers():
    """Return a PhoneNumbers model filled with a placeholder number."""
    placeholder = '1234567890'
    return PhoneNumbers(mobile=placeholder, work=placeholder, home=placeholder)
def get_email(name: Name):
return f'{name.firstName}.{name.lastName}@monoper.io'
def get_name():
    """Pick random first, middle and last names from fixed pools.

    The middle name is drawn from the first-name pool, matching the
    original generator's behaviour.
    """
    first_pool = ['john', 'sally', 'kate', 'samina', 'anne', 'will', 'catherine', 'ayla', 'kayla', 'katrina', 'rebecca', 'robert', 'sam', 'eric', 'greg']
    last_pool = ['smythe', 'smith', 'johnson', 'wali', 'erikson', 'takamora', 'harper', 'miller', 'jones', 'davis', 'garcia']

    def pick(pool):
        return pool[random.randint(0, len(pool) - 1)]

    first = pick(first_pool)
    middle = pick(first_pool)
    last = pick(last_pool)
    return Name(firstName=first, middleName=middle, lastName=last)
def get_address():
    """Generate a random Canadian Address; the postal code is a fixed placeholder."""
    cities = ['toronto', 'vancouver', 'montreal', 'winnipeg', 'halifax', 'london', 'paris', 'huntsville']
    street_addresses = ['main', 'yonge', 'queen', 'dundas', 'lord', 'red', 'blue', 'ontario', 'durham']
    street_suffixes = ['street', 'avenue', 'boulevard', 'circle']

    def pick(options):
        return options[random.randint(0, len(options) - 1)]

    # Draw order matches the original implementation.
    unit_number = random.randint(0, 99)
    city = pick(cities)
    street = pick(street_addresses)
    suffix = pick(street_suffixes)
    return Address(
        unit=unit_number,
        streetAddress=f'{street} {suffix}',
        city=city,
        province=random.randint(0, 11),
        country='canada',
        postalCode='l1l1w2',
    )
def get_providable_treatments():
    """Pick a random treatment; the description mirrors the name."""
    options = ['back massage', 'skin cleanse', 'general check up', 'blood testing', 'MRI scan', 'CT scan', 'cancer screening']
    chosen = options[random.randint(0, len(options) - 1)]
    return ProvidableTreatment(name=chosen, description=chosen)
def create_provider():
    """Build one random provider and POST it to the dev registration endpoint."""
    url = 'https://api.dev.blockmedisolutions.com/api/auth/register-provider'
    print(f'Using url: {url}')
    data = build_provider()
    # Serialised twice for debugging: the custom encoder vs pydantic's .json().
    json_data = json.dumps(data.dict(), cls=HelperEncoder)
    print(json_data)
    print(data.json())
    # NOTE(review): the request body is data.json(), not json_data — confirm
    # the HelperEncoder serialisation is intentionally unused for the POST.
    resp = requests.post(url, data=data.json())
    print(resp)
    print(resp.content)


if __name__ == "__main__":
    create_provider()
|
{"/example/app/api/blockchain/__init__.py": ["/example/app/api/blockchain/api/__init__.py"], "/example/app/api/provider_models.py": ["/example/app/api/common_models.py"], "/example/app/main.py": ["/example/app/api/__init__.py"], "/example/app/api/client_models.py": ["/example/app/api/common_models.py"], "/src/blockchain/api/blockchain_routes.py": ["/src/blockchain/blockchain.py"], "/example/app/api/provider_routes.py": ["/example/app/api/provider_models.py", "/example/app/api/common_models.py", "/example/app/api/blockchain/__init__.py", "/example/app/api/util.py", "/example/app/api/client_models.py"], "/example/app/api/auth_routes.py": ["/example/app/api/blockchain/__init__.py", "/example/app/api/client_models.py", "/example/app/api/provider_models.py", "/example/app/api/util.py", "/example/app/api/auth_models.py"], "/example/app/api/client_routes.py": ["/example/app/api/client_models.py", "/example/app/api/provider_models.py", "/example/app/api/common_models.py", "/example/app/api/blockchain/__init__.py", "/example/app/api/util.py"], "/example/app/api/blockchain/mongo.py": ["/example/app/api/blockchain/models.py"], "/example/app/dependencies.py": ["/example/app/api/blockchain/__init__.py"], "/example/app/api/__init__.py": ["/example/app/api/provider_routes.py", "/example/app/api/client_routes.py", "/example/app/api/auth_routes.py", "/example/app/api/blockchain/__init__.py"], "/example/app/api/blockchain/models.py": ["/example/app/api/util.py"], "/example/app/api/auth_models.py": ["/example/app/api/common_models.py", "/example/app/api/provider_models.py"], "/example/app/api/util.py": ["/example/app/api/common_models.py", "/example/app/api/provider_models.py", "/example/app/api/client_models.py"]}
|
20,272
|
monoper/BlockchainDB
|
refs/heads/main
|
/example/app/api/__init__.py
|
from .provider_routes import api as provider_api
from .client_routes import api as client_api
from .auth_routes import api as auth_api
from .blockchain import blockchain_api
|
{"/example/app/api/blockchain/__init__.py": ["/example/app/api/blockchain/api/__init__.py"], "/example/app/api/provider_models.py": ["/example/app/api/common_models.py"], "/example/app/main.py": ["/example/app/api/__init__.py"], "/example/app/api/client_models.py": ["/example/app/api/common_models.py"], "/src/blockchain/api/blockchain_routes.py": ["/src/blockchain/blockchain.py"], "/example/app/api/provider_routes.py": ["/example/app/api/provider_models.py", "/example/app/api/common_models.py", "/example/app/api/blockchain/__init__.py", "/example/app/api/util.py", "/example/app/api/client_models.py"], "/example/app/api/auth_routes.py": ["/example/app/api/blockchain/__init__.py", "/example/app/api/client_models.py", "/example/app/api/provider_models.py", "/example/app/api/util.py", "/example/app/api/auth_models.py"], "/example/app/api/client_routes.py": ["/example/app/api/client_models.py", "/example/app/api/provider_models.py", "/example/app/api/common_models.py", "/example/app/api/blockchain/__init__.py", "/example/app/api/util.py"], "/example/app/api/blockchain/mongo.py": ["/example/app/api/blockchain/models.py"], "/example/app/dependencies.py": ["/example/app/api/blockchain/__init__.py"], "/example/app/api/__init__.py": ["/example/app/api/provider_routes.py", "/example/app/api/client_routes.py", "/example/app/api/auth_routes.py", "/example/app/api/blockchain/__init__.py"], "/example/app/api/blockchain/models.py": ["/example/app/api/util.py"], "/example/app/api/auth_models.py": ["/example/app/api/common_models.py", "/example/app/api/provider_models.py"], "/example/app/api/util.py": ["/example/app/api/common_models.py", "/example/app/api/provider_models.py", "/example/app/api/client_models.py"]}
|
20,273
|
monoper/BlockchainDB
|
refs/heads/main
|
/example/app/api/blockchain/models.py
|
import uuid
import json
import logging
from hashlib import sha256
from datetime import datetime
from pydantic import BaseModel
from ..util import HelperEncoder
class Block:
    """A blockchain block: header fields plus a JSON-serialised payload.

    The block hash is a SHA-256 of the sorted-key JSON dump of the instance
    dict at hashing time, so assignment order in __init__ is load-bearing.
    """
    def __init__(self, id, data, block_type, timestamp: datetime, previous_hash,
                 data_collection_name, data_key_field_name, data_key_value):
        self.id = id
        self.block_type = block_type
        self.timestamp = timestamp
        self.previous_hash = previous_hash
        # Payload is stored as a JSON string so hashing is deterministic.
        self.data = json.dumps(data, cls=HelperEncoder)
        logging.debug(json.dumps(self.__dict__, sort_keys=True, cls=HelperEncoder))
        # NOTE: the hash is computed over __dict__ *before* the collection/key
        # fields below are assigned, so those fields are deliberately excluded
        # from the hash. Do not reorder these statements.
        self.hash = sha256(json.dumps(self.__dict__, sort_keys=True, cls=HelperEncoder).encode()) \
            .hexdigest()
        self.data_collection_name = data_collection_name
        self.data_key_field_name = data_key_field_name
        self.data_key_value = data_key_value
        self.superceded = False

    def get_naked_block(self):
        # Header-only view (no payload or collection bookkeeping).
        return NakedBlock(self.id, self.timestamp, self.block_type, self.hash, self.previous_hash)

    def get_data_block(self):
        # Persistence view carrying the payload and collection metadata.
        return DataBlock(self.timestamp, self.data_collection_name,
                         self.data, self.hash, self.block_type, self.superceded)
class NakedBlock:
    """Header-only projection of a Block (no payload or collection info)."""

    def __init__(self, id, timestamp, block_type, hash, previous_hash):
        # Copy every header field verbatim.
        self.id = id
        self.timestamp = timestamp
        self.block_type = block_type
        self.hash = hash
        self.previous_hash = previous_hash
class DataBlock:
    """Persistence view of a block: JSON payload plus collection bookkeeping."""

    def __init__(self, timestamp, data_collection_name, data, hash, block_type, superceded):
        self.timestamp = timestamp
        self.collection = data_collection_name
        self.block_type = block_type
        self.data = data
        self.superceded = superceded
        self.hash = hash

    def set_superceded(self):
        """Mark this block's data as replaced by a newer block."""
        self.superceded = True

    def get_document(self):
        """Return the payload as a dict annotated with hash/type/superceded."""
        document = json.loads(self.data)
        document.update(hash_id=self.hash,
                        block_type=self.block_type,
                        superceded=self.superceded)
        return document
def block_types_lookup(block_type):
    """Map a block-type name ("CREATE"/"GRANT"/"EDIT") to its numeric code.

    Raises KeyError for an unknown name.
    """
    codes = {"CREATE": 0, "GRANT": 1, "EDIT": 2}
    return codes[block_type]
def block_types_reverse_lookup(block_type):
    """Map a numeric block-type code back to its name.

    Raises KeyError for an unknown code. The stray debug print() from the
    original implementation has been removed.
    """
    names = {0: "CREATE", 1: "GRANT", 2: "EDIT"}
    return names[block_type]
class ProposedBlock(BaseModel):
    """Wire format of a block proposed to the chain.

    `data` is a JSON-encoded string (it is json.loads'ed when the block is
    materialised in generate_from_proposed_block).
    """
    id: str
    block_type: str
    timestamp: str
    data: str
    data_collection_name: str
    data_key_field_name: str
    data_key_value: str
def generate_block(data, block_type, timestamp: datetime, previous_hash,
                   data_collection_name, data_key_field_name, data_key_value):
    """Create a Block with a freshly generated hex UUID as its id."""
    new_id = uuid.uuid4().hex
    return Block(new_id, data, block_type, timestamp, previous_hash,
                 data_collection_name, data_key_field_name, data_key_value)
def generate_audit_block(id, data, block_type, timestamp: datetime, previous_hash):
    """Rebuild a Block from stored document data for audit purposes.

    NOTE(review): this mutates the caller's `data` dict (removes "hash_id")
    and raises KeyError when that key is absent — confirm callers rely on
    this in-place deletion before changing it.
    """
    del data["hash_id"]
    return Block(id, data, block_type, timestamp, previous_hash, '', '', '')
def generate_from_proposed_block(proposed_block: ProposedBlock, previous_hash):
    """Materialise a Block from a ProposedBlock, linking it to previous_hash."""
    payload = json.loads(proposed_block.data)
    return Block(proposed_block.id,
                 payload,
                 proposed_block.block_type,
                 proposed_block.timestamp,
                 previous_hash,
                 proposed_block.data_collection_name,
                 proposed_block.data_key_field_name,
                 proposed_block.data_key_value)
|
{"/example/app/api/blockchain/__init__.py": ["/example/app/api/blockchain/api/__init__.py"], "/example/app/api/provider_models.py": ["/example/app/api/common_models.py"], "/example/app/main.py": ["/example/app/api/__init__.py"], "/example/app/api/client_models.py": ["/example/app/api/common_models.py"], "/src/blockchain/api/blockchain_routes.py": ["/src/blockchain/blockchain.py"], "/example/app/api/provider_routes.py": ["/example/app/api/provider_models.py", "/example/app/api/common_models.py", "/example/app/api/blockchain/__init__.py", "/example/app/api/util.py", "/example/app/api/client_models.py"], "/example/app/api/auth_routes.py": ["/example/app/api/blockchain/__init__.py", "/example/app/api/client_models.py", "/example/app/api/provider_models.py", "/example/app/api/util.py", "/example/app/api/auth_models.py"], "/example/app/api/client_routes.py": ["/example/app/api/client_models.py", "/example/app/api/provider_models.py", "/example/app/api/common_models.py", "/example/app/api/blockchain/__init__.py", "/example/app/api/util.py"], "/example/app/api/blockchain/mongo.py": ["/example/app/api/blockchain/models.py"], "/example/app/dependencies.py": ["/example/app/api/blockchain/__init__.py"], "/example/app/api/__init__.py": ["/example/app/api/provider_routes.py", "/example/app/api/client_routes.py", "/example/app/api/auth_routes.py", "/example/app/api/blockchain/__init__.py"], "/example/app/api/blockchain/models.py": ["/example/app/api/util.py"], "/example/app/api/auth_models.py": ["/example/app/api/common_models.py", "/example/app/api/provider_models.py"], "/example/app/api/util.py": ["/example/app/api/common_models.py", "/example/app/api/provider_models.py", "/example/app/api/client_models.py"]}
|
20,274
|
monoper/BlockchainDB
|
refs/heads/main
|
/example/app/api/auth_models.py
|
"""
Models that are used during the authentication process and
for adding new clients/providers
"""
from typing import List
from datetime import datetime
from pydantic import BaseModel, EmailStr
from .common_models import Address, Name, PhoneNumbers
from .provider_models import ProvidableTreatment
class RegisterClient(BaseModel):
    """
    Model for client registration.

    Unlike RegisterProvider, a client has a single address and no
    providable treatments.
    """
    username: str
    password: str
    name: Name
    phoneNumbers: PhoneNumbers
    address: Address
    dateOfBirth: datetime
    email: EmailStr
class RegisterProvider(BaseModel):
    """
    Model for provider registration.

    A provider may list multiple addresses and the treatments it offers.
    """
    username: str
    password: str
    name: Name
    phoneNumbers: PhoneNumbers
    addresses: List[Address]
    dateOfBirth: datetime
    email: EmailStr
    providableTreatments: List[ProvidableTreatment]
class SignIn(BaseModel):
    """
    Model for sign in: the raw credentials submitted by a user.
    """
    username: str
    password: str
class ConfirmSignUp(BaseModel):
    """
    Model for confirming sign up with the code sent to the user.
    """
    username: str
    verificationCode: str
class ForgotPassword(BaseModel):
    """
    Model for starting the forgot-password flow.
    """
    username: str
class ConfirmForgotPassword(BaseModel):
    """
    Model for confirming a client forgotten password.

    NOTE(review): fields here are snake_case while most sibling models use
    camelCase — confirm the API clients send these exact key names.
    """
    username: str
    verification_code: str
    new_password: str
class ChangePassword(BaseModel):
    """
    Model for changing a password while signed in.
    """
    old_password: str
    new_password: str
class User(BaseModel):
    """
    Model for a signed-in user's identity.
    """
    userId: str
    username: str
    usertype: str
class Token(BaseModel):
    """
    Model for the auth token triple returned after sign in.
    """
    id_token: str
    access_token: str
    refresh_token: str
class SignInResponse(BaseModel):
    """
    Model for a successful sign-in response: the user plus their tokens.
    """
    user: User
    token: Token
|
{"/example/app/api/blockchain/__init__.py": ["/example/app/api/blockchain/api/__init__.py"], "/example/app/api/provider_models.py": ["/example/app/api/common_models.py"], "/example/app/main.py": ["/example/app/api/__init__.py"], "/example/app/api/client_models.py": ["/example/app/api/common_models.py"], "/src/blockchain/api/blockchain_routes.py": ["/src/blockchain/blockchain.py"], "/example/app/api/provider_routes.py": ["/example/app/api/provider_models.py", "/example/app/api/common_models.py", "/example/app/api/blockchain/__init__.py", "/example/app/api/util.py", "/example/app/api/client_models.py"], "/example/app/api/auth_routes.py": ["/example/app/api/blockchain/__init__.py", "/example/app/api/client_models.py", "/example/app/api/provider_models.py", "/example/app/api/util.py", "/example/app/api/auth_models.py"], "/example/app/api/client_routes.py": ["/example/app/api/client_models.py", "/example/app/api/provider_models.py", "/example/app/api/common_models.py", "/example/app/api/blockchain/__init__.py", "/example/app/api/util.py"], "/example/app/api/blockchain/mongo.py": ["/example/app/api/blockchain/models.py"], "/example/app/dependencies.py": ["/example/app/api/blockchain/__init__.py"], "/example/app/api/__init__.py": ["/example/app/api/provider_routes.py", "/example/app/api/client_routes.py", "/example/app/api/auth_routes.py", "/example/app/api/blockchain/__init__.py"], "/example/app/api/blockchain/models.py": ["/example/app/api/util.py"], "/example/app/api/auth_models.py": ["/example/app/api/common_models.py", "/example/app/api/provider_models.py"], "/example/app/api/util.py": ["/example/app/api/common_models.py", "/example/app/api/provider_models.py", "/example/app/api/client_models.py"]}
|
20,275
|
monoper/BlockchainDB
|
refs/heads/main
|
/example/app/api/blockchain/api/__init__.py
|
"""Renaming blockchain api export"""
from .blockchain_routes import api as blockchain_api
|
{"/example/app/api/blockchain/__init__.py": ["/example/app/api/blockchain/api/__init__.py"], "/example/app/api/provider_models.py": ["/example/app/api/common_models.py"], "/example/app/main.py": ["/example/app/api/__init__.py"], "/example/app/api/client_models.py": ["/example/app/api/common_models.py"], "/src/blockchain/api/blockchain_routes.py": ["/src/blockchain/blockchain.py"], "/example/app/api/provider_routes.py": ["/example/app/api/provider_models.py", "/example/app/api/common_models.py", "/example/app/api/blockchain/__init__.py", "/example/app/api/util.py", "/example/app/api/client_models.py"], "/example/app/api/auth_routes.py": ["/example/app/api/blockchain/__init__.py", "/example/app/api/client_models.py", "/example/app/api/provider_models.py", "/example/app/api/util.py", "/example/app/api/auth_models.py"], "/example/app/api/client_routes.py": ["/example/app/api/client_models.py", "/example/app/api/provider_models.py", "/example/app/api/common_models.py", "/example/app/api/blockchain/__init__.py", "/example/app/api/util.py"], "/example/app/api/blockchain/mongo.py": ["/example/app/api/blockchain/models.py"], "/example/app/dependencies.py": ["/example/app/api/blockchain/__init__.py"], "/example/app/api/__init__.py": ["/example/app/api/provider_routes.py", "/example/app/api/client_routes.py", "/example/app/api/auth_routes.py", "/example/app/api/blockchain/__init__.py"], "/example/app/api/blockchain/models.py": ["/example/app/api/util.py"], "/example/app/api/auth_models.py": ["/example/app/api/common_models.py", "/example/app/api/provider_models.py"], "/example/app/api/util.py": ["/example/app/api/common_models.py", "/example/app/api/provider_models.py", "/example/app/api/client_models.py"]}
|
20,276
|
monoper/BlockchainDB
|
refs/heads/main
|
/example/test/test_blockchain.py
|
import unittest
import time
from blockchain.blockchain import Blockchain
from blockchain.block import Block
class testTest(unittest.TestCase):
    """Smoke tests for Blockchain chain-length behaviour."""

    def test_genesis_block_created(self):
        # A fresh chain contains exactly the genesis block.
        chain_under_test = Blockchain()
        self.assertEqual(1, len(chain_under_test.chain))

    def test_add_single_block(self):
        # Adding one block grows the chain to two entries.
        chain_under_test = Blockchain()
        chain_under_test.addBlock(["aaa"])
        self.assertEqual(2, len(chain_under_test.chain))

    def test(self):
        # Trivial sanity check that the test runner executes this class.
        self.assertTrue(True)
|
{"/example/app/api/blockchain/__init__.py": ["/example/app/api/blockchain/api/__init__.py"], "/example/app/api/provider_models.py": ["/example/app/api/common_models.py"], "/example/app/main.py": ["/example/app/api/__init__.py"], "/example/app/api/client_models.py": ["/example/app/api/common_models.py"], "/src/blockchain/api/blockchain_routes.py": ["/src/blockchain/blockchain.py"], "/example/app/api/provider_routes.py": ["/example/app/api/provider_models.py", "/example/app/api/common_models.py", "/example/app/api/blockchain/__init__.py", "/example/app/api/util.py", "/example/app/api/client_models.py"], "/example/app/api/auth_routes.py": ["/example/app/api/blockchain/__init__.py", "/example/app/api/client_models.py", "/example/app/api/provider_models.py", "/example/app/api/util.py", "/example/app/api/auth_models.py"], "/example/app/api/client_routes.py": ["/example/app/api/client_models.py", "/example/app/api/provider_models.py", "/example/app/api/common_models.py", "/example/app/api/blockchain/__init__.py", "/example/app/api/util.py"], "/example/app/api/blockchain/mongo.py": ["/example/app/api/blockchain/models.py"], "/example/app/dependencies.py": ["/example/app/api/blockchain/__init__.py"], "/example/app/api/__init__.py": ["/example/app/api/provider_routes.py", "/example/app/api/client_routes.py", "/example/app/api/auth_routes.py", "/example/app/api/blockchain/__init__.py"], "/example/app/api/blockchain/models.py": ["/example/app/api/util.py"], "/example/app/api/auth_models.py": ["/example/app/api/common_models.py", "/example/app/api/provider_models.py"], "/example/app/api/util.py": ["/example/app/api/common_models.py", "/example/app/api/provider_models.py", "/example/app/api/client_models.py"]}
|
20,277
|
monoper/BlockchainDB
|
refs/heads/main
|
/example/app/api/util.py
|
"""Utility functions"""
import json
import uuid
import os
from datetime import datetime
from pycognito import Cognito
from fastapi import Depends, HTTPException
from fastapi.security.http import HTTPBearer, HTTPBasicCredentials
from .common_models import AppointmentStatus, Provinces, Appointment, \
Address, Name, PhoneNumbers, ProvidableTreatment, \
PrescribedTreatment, Notes
from .provider_models import Provider
from .client_models import Client, LinkedProvider
# Shared bearer-token security scheme, used as a FastAPI dependency below.
auth = HTTPBearer()


async def verify_auth_header(authorization: HTTPBasicCredentials = Depends(auth)):
    """
    Verifies the credentials sent in the authorisation header with cognito.

    Returns the raw access token on success. Any failure — invalid token,
    token not resolving to a user, or even a missing USER_POOL_* environment
    variable — is collapsed into HTTP 403 by the broad except below.
    """
    try:
        aws_cognito = Cognito(os.environ['USER_POOL_ID'],
                              os.environ['USER_POOL_WEB_CLIENT_ID'],
                              access_token=authorization.credentials)
        # get_user() returning None means the token did not resolve to a user.
        if aws_cognito.get_user() is None:
            raise HTTPException(status_code=403)
        return authorization.credentials
    except Exception as forbidden:
        raise HTTPException(status_code=403) from forbidden
class HelperEncoder(json.JSONEncoder):
    """
    Helper for JSON encoding of project value objects.

    UUIDs become strings, datetimes become ISO-8601, enum members become
    their wire value, and the model classes serialise as their __dict__.
    """

    def default(self, o):
        if isinstance(o, uuid.UUID):
            return str(o)
        if isinstance(o, datetime):
            return o.isoformat()
        # Both enums serialise as their underlying value.
        if isinstance(o, (Provinces, AppointmentStatus)):
            return o.value
        dict_like = (Address,
                     Appointment,
                     Client,
                     Name,
                     PhoneNumbers,
                     ProvidableTreatment,
                     Provider,
                     LinkedProvider,
                     PrescribedTreatment,
                     Notes)
        if isinstance(o, dict_like):
            return o.__dict__
        return super().default(o)
|
{"/example/app/api/blockchain/__init__.py": ["/example/app/api/blockchain/api/__init__.py"], "/example/app/api/provider_models.py": ["/example/app/api/common_models.py"], "/example/app/main.py": ["/example/app/api/__init__.py"], "/example/app/api/client_models.py": ["/example/app/api/common_models.py"], "/src/blockchain/api/blockchain_routes.py": ["/src/blockchain/blockchain.py"], "/example/app/api/provider_routes.py": ["/example/app/api/provider_models.py", "/example/app/api/common_models.py", "/example/app/api/blockchain/__init__.py", "/example/app/api/util.py", "/example/app/api/client_models.py"], "/example/app/api/auth_routes.py": ["/example/app/api/blockchain/__init__.py", "/example/app/api/client_models.py", "/example/app/api/provider_models.py", "/example/app/api/util.py", "/example/app/api/auth_models.py"], "/example/app/api/client_routes.py": ["/example/app/api/client_models.py", "/example/app/api/provider_models.py", "/example/app/api/common_models.py", "/example/app/api/blockchain/__init__.py", "/example/app/api/util.py"], "/example/app/api/blockchain/mongo.py": ["/example/app/api/blockchain/models.py"], "/example/app/dependencies.py": ["/example/app/api/blockchain/__init__.py"], "/example/app/api/__init__.py": ["/example/app/api/provider_routes.py", "/example/app/api/client_routes.py", "/example/app/api/auth_routes.py", "/example/app/api/blockchain/__init__.py"], "/example/app/api/blockchain/models.py": ["/example/app/api/util.py"], "/example/app/api/auth_models.py": ["/example/app/api/common_models.py", "/example/app/api/provider_models.py"], "/example/app/api/util.py": ["/example/app/api/common_models.py", "/example/app/api/provider_models.py", "/example/app/api/client_models.py"]}
|
20,278
|
ALEXJAZZ008008/physics_simulation_opengl
|
refs/heads/master
|
/main.py
|
from OpenGL.GL import *
from OpenGL.GLUT import *
import delta_time
import constants
import spheres
import cube
import camera
def display():
    """Per-frame GLUT callback: apply queued camera input, step physics, draw."""
    delta_time.update_current_time()
    delta_time.update_delta_time()
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
    # Apply any rotation queued by the keyboard handler, then clear it.
    if keyboard_camera.rotation_bool:
        glRotatef(keyboard_camera.rotation_magnitude.x, keyboard_camera.rotation_direction.x, 0.0, 0.0)
        glRotatef(keyboard_camera.rotation_magnitude.y, 0.0, keyboard_camera.rotation_direction.y, 0.0)
        glRotatef(keyboard_camera.rotation_magnitude.z, 0.0, 0.0, keyboard_camera.rotation_direction.z)
        keyboard_camera.reset_rotation()
        keyboard_camera.rotation_bool = False
    # Apply any queued translation; the z component doubles as a zoom scale.
    if keyboard_camera.translation_bool:
        glTranslatef(keyboard_camera.translation.x, keyboard_camera.translation.y, 0.0)
        glScalef(keyboard_camera.translation.z, keyboard_camera.translation.z, keyboard_camera.translation.z)
        keyboard_camera.reset_translation()
        keyboard_camera.translation_bool = False
    box.update()
    box.draw()
    # Physics step is scaled by frame delta and the user-adjustable speed.
    ball_list.update(delta_time.delta_time * keyboard_camera.speed, constants.gravitational_acceleration(), box)
    ball_list.draw()
    glFlush()
    glutSwapBuffers()
    glutPostRedisplay()
    delta_time.update_previous_time()
def keyboard(key, i, j):
    """GLUT keyboard callback.

    WASD/QE queue rotations, IJKL/OU queue translations (O/U scale zoom),
    and T/G adjust the simulation speed. `i`/`j` are the cursor coordinates
    GLUT supplies; they are unused here.
    """
    cam = keyboard_camera
    # key -> (axis, rotation magnitude); direction on that axis is always 1.0.
    rotation_keys = {
        b'w': ('x', -1.0), b's': ('x', 1.0),
        b'a': ('y', 1.0), b'd': ('y', -1.0),
        b'e': ('z', 1.0), b'q': ('z', -1.0),
    }
    # key -> (axis, translation amount); z acts as a zoom factor.
    translation_keys = {
        b'i': ('y', 100.0), b'k': ('y', -100.0),
        b'j': ('x', 100.0), b'l': ('x', -100.0),
        b'o': ('z', 1.1), b'u': ('z', 0.9),
    }
    if key in rotation_keys:
        axis, magnitude = rotation_keys[key]
        setattr(cam.rotation_magnitude, axis, magnitude)
        setattr(cam.rotation_direction, axis, 1.0)
        cam.rotation_bool = True
    elif key in translation_keys:
        axis, amount = translation_keys[key]
        setattr(cam.translation, axis, amount)
        cam.translation_bool = True
    elif key == b't':
        cam.speed += 0.1
    elif key == b'g':
        cam.speed -= 0.1
def main():
    """Initialise GLUT, configure the GL pipeline, and enter the render loop."""
    glutInit()
    glutInitDisplayMode(GLUT_RGBA | GLUT_DOUBLE | GLUT_ALPHA | GLUT_DEPTH)
    glutInitWindowSize(width, height)
    glutInitWindowPosition(0, 0)
    glutCreateWindow("python_simulation_opengl")
    # display doubles as the idle callback so the simulation runs continuously.
    glutDisplayFunc(display)
    glutIdleFunc(display)
    glutKeyboardFunc(keyboard)
    glMatrixMode(GL_PROJECTION)
    glShadeModel(GL_SMOOTH)
    glEnable(GL_DEPTH_TEST)
    # Single positioned light plus a global ambient term.
    glEnable(GL_LIGHTING)
    glLightfv(GL_LIGHT0, GL_AMBIENT, [0.0, 0.0, 0.0, 1.0])
    glLightfv(GL_LIGHT0, GL_DIFFUSE, [0.7, 0.7, 0.7, 1.0])
    glLightfv(GL_LIGHT0, GL_SPECULAR, [0.7, 0.7, 0.7, 1.0])
    glLightfv(GL_LIGHT0, GL_POSITION, [-500, 1000, -1000, 1])
    glEnable(GL_LIGHT0)
    glLightModelfv(GL_LIGHT_MODEL_AMBIENT, [0.3, 0.3, 0.3, 1.0])
    glLightModeli(GL_LIGHT_MODEL_LOCAL_VIEWER, GL_TRUE)
    glEnable(GL_CULL_FACE)
    glCullFace(GL_BACK)
    glClearColor(0.0, 0.0, 0.0, 0.0)
    glLoadIdentity()
    # Orthographic projection with a very deep z range to fit the whole box.
    glOrtho(0.0, width, height, 0.0, -100000.0, 100000.0)
    glPointSize(1.0)
    # Centre the scene, shrink it to fit, and tilt it for a 3D view.
    glTranslatef(width / 2, height / 2, 0.0)
    glScalef(0.25, 0.25, 0.25)
    glRotatef(180.0, 0.0, 0.0, 1.0)
    glRotatef(20.0, 1.0, 1.0, 0.0)
    glutMainLoop()
# Window dimensions in pixels.
height = 900
width = 1600
keyboard_camera = camera.Camera()
# Simulation bounds: half-extent 1000, elasticity 0.8, friction 0.2.
box = cube.Cube(1000, constants.cube_indices(), 0.8, 0.2)
ball_list = spheres.Spheres(20, 125, 2.0, box.size, 0.8, 0.2)
# NOTE(review): this rebinding shadows the imported `delta_time` module; the
# module itself is unreachable afterwards. Works, but fragile.
delta_time = delta_time.DeltaTime()
main()
|
{"/main.py": ["/delta_time.py", "/constants.py", "/spheres.py", "/cube.py", "/camera.py"], "/constants.py": ["/vector3d.py"], "/camera.py": ["/vector3d.py"], "/sphere.py": ["/vector3d.py"], "/spheres.py": ["/sphere.py"]}
|
20,279
|
ALEXJAZZ008008/physics_simulation_opengl
|
refs/heads/master
|
/constants.py
|
import vector3d
def cube_indices():
    """Return the 12 vertex-index pairs forming a cube's wireframe edges."""
    return (
        (0, 1), (0, 3), (0, 4),
        (2, 1), (2, 3), (2, 7),
        (6, 3), (6, 4), (6, 7),
        (5, 1), (5, 4), (5, 7),
    )
def gravitational_acceleration():
    """Constant downward acceleration vector (magnitude scaled to scene units)."""
    return vector3d.Vector3D(0, -9800, 0)
|
{"/main.py": ["/delta_time.py", "/constants.py", "/spheres.py", "/cube.py", "/camera.py"], "/constants.py": ["/vector3d.py"], "/camera.py": ["/vector3d.py"], "/sphere.py": ["/vector3d.py"], "/spheres.py": ["/sphere.py"]}
|
20,280
|
ALEXJAZZ008008/physics_simulation_opengl
|
refs/heads/master
|
/cube.py
|
from OpenGL.GL import *
class Cube(object):
    """Wireframe cube centred on the origin with half-extent `size`.

    `indices` lists vertex-index pairs describing which corners to connect;
    elasticity and friction are read by the collision code elsewhere.
    """

    def __init__(self, size, indices, elasticity, friction):
        self.size = size
        half = size
        # Eight corners; ordering matters because `indices` refers into it.
        self.vertices = (
            (half, -half, -half),
            (half, half, -half),
            (-half, half, -half),
            (-half, -half, -half),
            (half, -half, half),
            (half, half, half),
            (-half, -half, half),
            (-half, half, half)
        )
        self.indices = indices
        self.elasticity = elasticity
        self.friction = friction

    def update(self):
        """The box is static; nothing to advance per frame."""
        pass

    def draw(self):
        """Render the cube's edges as GL lines with a shiny white material."""
        glMaterialfv(GL_FRONT, GL_AMBIENT_AND_DIFFUSE, [1.0, 1.0, 1.0, 1.0])
        glMaterialfv(GL_FRONT, GL_SPECULAR, [1, 1, 1, 1])
        glMaterialfv(GL_FRONT, GL_SHININESS, [100.0])
        glBegin(GL_LINES)
        for edge in self.indices:
            for vertex_index in edge:
                glVertex3fv(self.vertices[vertex_index])
        glEnd()
|
{"/main.py": ["/delta_time.py", "/constants.py", "/spheres.py", "/cube.py", "/camera.py"], "/constants.py": ["/vector3d.py"], "/camera.py": ["/vector3d.py"], "/sphere.py": ["/vector3d.py"], "/spheres.py": ["/sphere.py"]}
|
20,281
|
ALEXJAZZ008008/physics_simulation_opengl
|
refs/heads/master
|
/camera.py
|
import vector3d
class Camera(object):
    """Accumulates pending camera moves for the render loop to consume.

    The *_bool flags tell the display callback that a transform is queued;
    the reset_* methods return the pending state to neutral.
    """

    def __init__(self):
        self.speed = 1.0
        # z of `translation` doubles as a zoom scale, so neutral is 1.0.
        self.translation = vector3d.Vector3D(0.0, 0.0, 1.0)
        self.rotation_magnitude = vector3d.Vector3D(0.0, 0.0, 0.0)
        self.rotation_direction = vector3d.Vector3D(0.0, 0.0, 0.0)
        self.translation_bool = False
        self.rotation_bool = False

    def reset_translation(self):
        """Clear any queued translation (zoom back to neutral 1.0)."""
        self.translation = vector3d.Vector3D(0.0, 0.0, 1.0)
        self.translation_bool = False

    def reset_rotation(self):
        """Clear any queued rotation."""
        self.rotation_magnitude = vector3d.Vector3D(0.0, 0.0, 0.0)
        self.rotation_direction = vector3d.Vector3D(0.0, 0.0, 0.0)
        self.rotation_bool = False
|
{"/main.py": ["/delta_time.py", "/constants.py", "/spheres.py", "/cube.py", "/camera.py"], "/constants.py": ["/vector3d.py"], "/camera.py": ["/vector3d.py"], "/sphere.py": ["/vector3d.py"], "/spheres.py": ["/sphere.py"]}
|
20,282
|
ALEXJAZZ008008/physics_simulation_opengl
|
refs/heads/master
|
/sphere.py
|
import random
from OpenGL.GL import *
from OpenGL.GLUT import *
import vector3d
class Sphere(object):
    """A bouncing ball: randomised size/mass/colour, forward-Euler motion,
    and collision response against the box walls and other spheres.

    NOTE: vector3d.Vector3D.dot() actually returns the Euclidean DISTANCE
    between two points (see vector3d.py); every "dot" call below relies on
    that, not on a true dot product.
    """
    def __init__(self, elasticity, friction):
        self.size = 10
        self.mass = 1.0
        self.colour = vector3d.Vector3D(0.0, 0.0, 0.0)
        self.position = vector3d.Vector3D(0.0, 0.0, 0.0)
        # Kept so a wall collision can rewind the last integration step.
        self.previous_position = self.position
        self.velocity = vector3d.Vector3D(0.0, 0.0, 0.0)
        self.elasticity = elasticity
        self.friction = friction
    @staticmethod
    def get_random_size(max_size):
        # Radius between 50 and max_size (assumes max_size >= 50 — TODO confirm).
        return random.randint(50, max_size)
    def reset_size(self, max_size):
        self.size = self.get_random_size(max_size)
    @staticmethod
    def get_random_mass(max_mass):
        return random.uniform(0.5, max_mass)
    def reset_mass(self, max_mass):
        self.mass = self.get_random_mass(max_mass)
    @staticmethod
    def get_random_colour():
        # Random RGB stored in a Vector3D, each channel in [0, 1).
        return vector3d.Vector3D(random.random(), random.random(), random.random())
    def reset_colour(self):
        self.colour = self.get_random_colour()
    def get_random_position(self, box_size):
        # Anywhere inside the box horizontally; only y >= 0 vertically,
        # so respawned balls drop back down under gravity.
        return vector3d.Vector3D(random.uniform(-box_size + self.size, box_size - self.size),
                                 random.uniform(0, box_size - self.size),
                                 random.uniform(-box_size + self.size, box_size - self.size))
    def ball_collision_detection(self, ball):
        # True when centre distance is below the radii sum (dot == distance).
        return self.position.dot(ball.position) < self.size + ball.size
    def reset_position(self, box_size, balls):
        # Rejection-sample positions until this ball overlaps no other ball.
        colliding = True
        while colliding:
            colliding = False
            self.position = self.get_random_position(box_size)
            for ball in balls:
                if ball != self:
                    if self.ball_collision_detection(ball):
                        colliding = True
    @staticmethod
    def get_random_velocity(box_size):
        # Horizontal kick only; gravity supplies the vertical motion.
        return vector3d.Vector3D(random.uniform(-box_size, box_size) * 2.0,
                                 0.0,
                                 random.uniform(-box_size, box_size) * 2.0)
    def reset_velocity(self, box_size):
        self.velocity = self.get_random_velocity(box_size)
    def reset(self, max_size, max_mass, box_size, balls):
        # Full respawn: new size/mass/colour, a collision-free position, new kick.
        self.reset_size(max_size)
        self.reset_mass(max_mass)
        self.reset_colour()
        self.reset_position(box_size, balls)
        self.reset_velocity(box_size)
    @staticmethod
    def integrate(value, increment, delta_time):
        # Explicit (forward) Euler step: value + increment * dt, per component.
        new_value = vector3d.Vector3D(0, 0, 0)
        new_value.x = value.x + (increment.x * delta_time)
        new_value.y = value.y + (increment.y * delta_time)
        new_value.z = value.z + (increment.z * delta_time)
        return new_value
    def box_elastic_constant(self, box):
        # Restitution along the collision normal: mean of both materials.
        return (self.elasticity + box.elasticity) * 0.5
    def box_friction_constant(self, box):
        # Damping for the tangential axes: 1 - mean friction.
        return 1 - ((self.friction + box.friction) * 0.5)
    def box_collision(self, box):
        # Per-axis wall test: rewind to the previous position, reflect the
        # penetrating axis (restitution), damp the other two (friction).
        if self.position.x - self.size < -box.size or self.position.x + self.size > box.size:
            self.position = self.previous_position
            self.velocity.x *= -1
            self.velocity.x *= self.box_elastic_constant(box)
            self.velocity.y *= self.box_friction_constant(box)
            self.velocity.z *= self.box_friction_constant(box)
        if self.position.y - self.size < -box.size or self.position.y + self.size > box.size:
            self.position = self.previous_position
            self.velocity.y *= -1
            self.velocity.x *= self.box_friction_constant(box)
            self.velocity.y *= self.box_elastic_constant(box)
            self.velocity.z *= self.box_friction_constant(box)
        if self.position.z - self.size < -box.size or self.position.z + self.size > box.size:
            self.position = self.previous_position
            self.velocity.z *= -1
            self.velocity.x *= self.box_friction_constant(box)
            self.velocity.y *= self.box_friction_constant(box)
            self.velocity.z *= self.box_elastic_constant(box)
    def ball_elastic_constant(self, ball):
        return (self.elasticity + ball.elasticity) * 0.5
    def ball_collision_response(self, ball):
        # Mass-weighted impulse exchange along the centre-to-centre normal.
        # NOTE(review): since dot() is a distance, this differs from the
        # textbook dot-product impulse formula — behaviour kept as written.
        if self.ball_collision_detection(ball):
            normal = vector3d.Vector3D(self.position.x - ball.position.x,
                                       self.position.y - ball.position.y,
                                       self.position.z - ball.position.z)
            normal.normalise()
            force_magnitude = ((self.velocity.dot(normal) - ball.velocity.dot(normal)) * 2.0) / (self.mass + ball.mass)
            self.velocity = vector3d.Vector3D(self.velocity.x - ((force_magnitude * ball.mass) * normal.x),
                                              self.velocity.y - ((force_magnitude * ball.mass) * normal.y),
                                              self.velocity.z - ((force_magnitude * ball.mass) * normal.z))
            ball.velocity = vector3d.Vector3D(ball.velocity.x + ((force_magnitude * self.mass) * normal.x),
                                              ball.velocity.y + ((force_magnitude * self.mass) * normal.y),
                                              ball.velocity.z + ((force_magnitude * self.mass) * normal.z))
            self.velocity.x *= normal.x * self.ball_elastic_constant(ball)
            self.velocity.y *= normal.y * self.ball_elastic_constant(ball)
            self.velocity.z *= normal.z * self.ball_elastic_constant(ball)
            ball.velocity.x *= normal.x * ball.ball_elastic_constant(self)
            ball.velocity.y *= normal.y * ball.ball_elastic_constant(self)
            ball.velocity.z *= normal.z * ball.ball_elastic_constant(self)
    def check_moving(self, max_size, max_mass, box_size, balls):
        # Respawn balls that have nearly stopped so the scene stays lively.
        if self.velocity.magnitude() < 100:
            self.reset(max_size, max_mass, box_size, balls)
    def update(self, delta_time, force, box):
        # Euler-integrate acceleration -> velocity -> position, then walls.
        self.previous_position = self.position
        self.velocity = self.integrate(self.velocity, force, delta_time)
        self.position = self.integrate(self.position, self.velocity, delta_time)
        self.box_collision(box)
    def draw(self):
        # Coloured material + solid GLUT sphere at the current position.
        glMaterialfv(GL_FRONT, GL_AMBIENT_AND_DIFFUSE, [self.colour.x, self.colour.y, self.colour.z, 1.0])
        glMaterialfv(GL_FRONT, GL_SPECULAR, [1, 1, 1, 1])
        glMaterialfv(GL_FRONT, GL_SHININESS, [100.0])
        glPushMatrix()
        glTranslatef(self.position.x, self.position.y, self.position.z)
        glutSolidSphere(self.size, self.size, self.size)
        glPopMatrix()
|
{"/main.py": ["/delta_time.py", "/constants.py", "/spheres.py", "/cube.py", "/camera.py"], "/constants.py": ["/vector3d.py"], "/camera.py": ["/vector3d.py"], "/sphere.py": ["/vector3d.py"], "/spheres.py": ["/sphere.py"]}
|
20,283
|
ALEXJAZZ008008/physics_simulation_opengl
|
refs/heads/master
|
/delta_time.py
|
import time
class DeltaTime(object):
    """Tracks wall-clock time elapsed between simulation frames."""

    def __init__(self):
        # Both stamps start from the same reading, so the first delta is 0.
        now = self.get_current_time()
        self.previous_time = now
        self.current_time = now
        self.delta_time = self.current_time - self.previous_time

    @staticmethod
    def get_current_time():
        """Current wall-clock time, seconds since the epoch (float)."""
        return time.time()

    def update_previous_time(self):
        """Roll the current stamp into the previous one."""
        self.previous_time = self.current_time

    def update_current_time(self):
        """Refresh the current stamp from the wall clock."""
        self.current_time = self.get_current_time()

    def update_delta_time(self):
        """Recompute seconds elapsed since previous_time."""
        self.delta_time = self.get_current_time() - self.previous_time
|
{"/main.py": ["/delta_time.py", "/constants.py", "/spheres.py", "/cube.py", "/camera.py"], "/constants.py": ["/vector3d.py"], "/camera.py": ["/vector3d.py"], "/sphere.py": ["/vector3d.py"], "/spheres.py": ["/sphere.py"]}
|
20,284
|
ALEXJAZZ008008/physics_simulation_opengl
|
refs/heads/master
|
/spheres.py
|
import sphere
class Spheres(object):
    """Owns the collection of Sphere instances and drives their simulation."""

    def __init__(self, number_of_spheres, max_size, max_mass, box_size, elasticity, friction):
        self.max_size = max_size
        self.max_mass = max_mass
        self.balls = [sphere.Sphere(elasticity, friction) for _ in range(number_of_spheres)]
        # Place after all spheres exist so overlap checks see every ball.
        for ball in self.balls:
            ball.reset(self.max_size, self.max_mass, box_size, self.balls)

    def update(self, delta_time, force, box):
        """Integrate motion, resolve pairwise collisions, recycle stalled balls."""
        for ball in self.balls:
            ball.update(delta_time, force, box)
        # Visit each unordered pair exactly once.
        for idx, first in enumerate(self.balls):
            for second in self.balls[idx + 1:]:
                first.ball_collision_response(second)
        for ball in self.balls:
            ball.check_moving(self.max_size, self.max_mass, box.size, self.balls)

    def draw(self):
        """Render every sphere."""
        for ball in self.balls:
            ball.draw()
|
{"/main.py": ["/delta_time.py", "/constants.py", "/spheres.py", "/cube.py", "/camera.py"], "/constants.py": ["/vector3d.py"], "/camera.py": ["/vector3d.py"], "/sphere.py": ["/vector3d.py"], "/spheres.py": ["/sphere.py"]}
|
20,285
|
ALEXJAZZ008008/physics_simulation_opengl
|
refs/heads/master
|
/vector3d.py
|
import math
class Vector3D(object):
    """A minimal mutable 3-component vector.

    NOTE(review): dot() is misnamed — it returns the Euclidean DISTANCE
    between the two points, not the dot product. Callers (the sphere
    collision code) rely on that, so the behaviour is preserved.
    """

    def __init__(self, x, y, z):
        self.x = x
        self.y = y
        self.z = z

    def magnitude(self):
        """Euclidean length of the vector."""
        return math.sqrt(self.x ** 2 + self.y ** 2 + self.z ** 2)

    def dot(self, other):
        """Euclidean distance to *other* (see class note about the name)."""
        dx = self.x - other.x
        dy = self.y - other.y
        dz = self.z - other.z
        return math.sqrt(dx * dx + dy * dy + dz * dz)

    def normalise(self):
        """Scale to unit length in place; the zero vector stays zero."""
        length = self.magnitude()
        if length:
            self.x /= length
            self.y /= length
            self.z /= length
        else:
            self.x = self.y = self.z = 0
|
{"/main.py": ["/delta_time.py", "/constants.py", "/spheres.py", "/cube.py", "/camera.py"], "/constants.py": ["/vector3d.py"], "/camera.py": ["/vector3d.py"], "/sphere.py": ["/vector3d.py"], "/spheres.py": ["/sphere.py"]}
|
20,301
|
LucasBR96/MST-ANIMATION
|
refs/heads/main
|
/monitor_with_animation_test.py
|
import kruskal_monitor as krus
import prim_monitor as prim
import networkx as nx
from matplotlib import animation, rc
import matplotlib.pyplot as plt
# Run mode: 0 = free-running animation, 1 = step-by-step (a keypress toggles advance).
modo = 1
# Algorithm selectors, compared against the global `algo` chosen in main().
KRUSKAL = 0
PRIM = 1
# Graph under construction: networkx graph plus the raw edge-weight map and node set.
G = nx.Graph()
E = dict()
V = set()
# Animation control flags; `clicked` is declared but never used in this file.
advance = True
clicked = False
fig, ax = plt.subplots(figsize=(10,8))
# Optional ffmpeg writer for saving the animation (left disabled).
# Writer = animation.writers['ffmpeg']
# writer = Writer(fps=15, metadata=dict(artist='Me'), bitrate=1800)
def solution_generator():
    """Frame generator for FuncAnimation: yields one formatted state string
    per animation frame.

    In controlled mode (modo == 1) it performs three monitor phases
    (SELECT + CONSIDER + UPDATE) per keypress and then waits until the key
    handler sets `advance` again.
    NOTE(review): relies on the global `algo` being set by main() before
    the animation starts; `algo` is not declared `global` in this scope.
    """
    global advance, modo
    monitor = krus
    if algo == PRIM:
        monitor = prim
    monitor._init( V , E )
    while True:
        if(advance):
            if(modo == 1):
                advance = False
                # One visual step = three monitor phases; the popped value is
                # the last _next() result (False once the algorithm finished).
                seq = [ monitor._next() for i in range( 3 ) ]
                if(seq.pop()):
                    yield pretty_vars(monitor.get_variables(), False)
                else:
                    break
            else:
                yield pretty_vars(monitor.get_variables(), False)
    # Final frame: end=True clears the highlighted edge in update().
    yield pretty_vars(monitor.get_variables(), True)
def pretty_vars(mst_vars, end):
    """Unpack the monitor's state tuple into module globals (read by
    update()) and render it as display text.

    When *end* is true the current edge is cleared so the final frame
    stops highlighting it.
    """
    global edge_status, current_edge, Va, Ea
    edge_status, current_edge, Va, Ea = mst_vars
    lines = [
        "edge_status = {}".format(edge_status),
        "current_edge = {} {}".format(*current_edge),
        "nodes in tree: ",
        "\t" + ' '.join(Va),
        "edges in tree:",
    ]
    for u, w in Ea:
        lines.append("\t" + "{} {}".format(u, w))
    if end:
        current_edge = None
    return "\n".join(lines) + "\n"
def do_nothing():
    """No-op init callback: FuncAnimation requires an initialization
    function and we have nothing to initialize."""
    return None
#FIXME - reduce only to drawing
def update(mst_edges):
    """Redraw the whole graph for one animation frame.

    Reads the module globals set by pretty_vars (current_edge, Va, Ea):
    faded edges for the untouched graph, green for accepted tree edges,
    red for the edge under consideration. *mst_edges* is the value yielded
    by solution_generator and is not used for drawing.
    """
    current_edges = set()
    current_edges.add(current_edge)
    ax.clear()
    # Normalise edge tuples so set arithmetic with Ea is order-independent.
    all_edges = set(tuple(sorted((n1, n2))) for n1, n2 in G.edges())
    node_labels = {}
    for idx, node in enumerate(G.nodes()):
        node_labels[node] = node
    # Edges not (yet) in the tree: barely visible.
    nx.draw_networkx_edges(
        G, pos, edgelist=all_edges-Ea - current_edges, alpha=0.1,
        edge_color='g', width=1, ax=ax
    )
    labels = nx.get_edge_attributes(G,'weight')
    # Accepted tree edges: solid green.
    nx.draw_networkx_edges(
        G, pos, edgelist=Ea - current_edges , alpha=1.0,
        edge_color='green', width=1, ax=ax
    )
    # Highlight the edge currently being considered (None on the last frame).
    if(current_edge != None):
        nx.draw_networkx_edges(
            G, pos, edgelist=current_edges , alpha=1.0,
            edge_color='r', width=1, ax=ax
        )
    # Nodes: gray until reached, blue once in the tree; then weights + names.
    nx.draw_networkx_nodes(G, pos, nodelist=G.nodes()-Va, node_color='gray', alpha=0.5, node_size=300, ax=ax)
    nx.draw_networkx_nodes(G, pos, nodelist=Va, node_color='b', alpha=0.5, node_size=300, ax=ax)
    nx.draw_networkx_edge_labels(G,pos,edge_labels=labels, alpha=0.5, ax=ax)
    nx.draw_networkx_labels(G, pos, node_labels, alpha=1, ax=ax)
def on_press(event):
    """Key handler: in controlled mode (modo == 1) any key toggles stepping."""
    global advance, modo
    if (modo == 1):
        advance = not advance
# Route figure key presses to the handler above.
fig.canvas.mpl_connect('key_press_event', on_press)
def main():
    """Prompt for algorithm and mode, read the edge list from stdin, then
    build the networkx graph and run the animation.

    Edge input format: "a b w" per line (lowercase node names, integer
    weight), terminated by "-1". Prompts are in Portuguese.
    """
    global pos, algo, ani, modo
    print( "escolha o algoritimo:" )
    print( "0 - kruskal" )
    print( "1 - prim" )
    algo = int( input() )
    print( "selecione o modo:" )
    print( "0 - direto" )
    print( "1 - controlado" )
    modo = int( input() )
    print()
    print( "digite os vertices do grafo" )
    print( 'formato: m m i' )
    print( "m -> minuscula")
    print( "i -> inteiro" )
    print( "digite -1 se acabou")
    while True:
        tup = input().rstrip()
        if tup == "-1":
            break
        a , b , m = tup.split()
        E[ ( a , b ) ] = int( m )
        V.add( a )
        V.add( b )
    G.add_nodes_from(V)
    for key in E.keys():
        print(key[0], key[1], E[key])
        G.add_edge(key[0], key[1], weight = E[key])
    # Layout computed once so nodes stay put between frames.
    pos = nx.random_layout(G)
    node_labels = {}
    for idx, node in enumerate(G.nodes()):
        node_labels[node] = node
    #ani = Player(fig, krus, ax, G, V, E, pos, nx.get_edge_attributes(G,'weight') ,node_labels)
    # `ani` is kept in a global so the animation object isn't garbage collected.
    ani = animation.FuncAnimation(
        fig,
        update,
        init_func=do_nothing,
        frames=solution_generator,
        interval=500,
        repeat = False
    )
    plt.show()
main()
|
{"/monitor_with_animation_test.py": ["/kruskal_monitor.py", "/prim_monitor.py"], "/monitor_test.py": ["/kruskal_monitor.py", "/prim_monitor.py"]}
|
20,302
|
LucasBR96/MST-ANIMATION
|
refs/heads/main
|
/CLI_input.py
|
import string
import sys
# GLOBAL VARS --------------------------------------------------
# "*" aborts interactive input; "-1" terminates an edge list; MAX_NODES caps
# the size of randomly built graphs.
EXIT_CHAR = "*"
END_CHAR = "-1"
MAX_NODES = 100
# Short flag character -> configuration key it sets.
CHAR_TERMS = {
    "a":"algo" ,
    "r":"exec" ,
    "b":"build"
}
# Allowed values for each configuration key (input is uppercased first).
VALID_CHOICES = {
    "algo" :[ "PRIM", "KRUSKAL" ],
    "exec" :[ "DIRECT", "ITER" ],
    "build":[ "CUSTOM" , "RANDOM"],
}
# Accumulated configuration, filled in by char_choice and the build functions.
input_info = dict()
# FUNCTIONS ---------------------------------------------------
def random_build():
    """Interactively read node and edge counts for a random graph.

    Only the counts are stored (input_info["num_nodes"/"num_edges"]); the
    actual random graph is generated elsewhere. Typing "*" aborts with
    InterruptedError. Prompts are in Portuguese.
    """
    # setting number of nodes
    print("digite a quantidade de nos")
    print("maximo -> {}".format( MAX_NODES ) )
    while True:
        c = input()
        if c == EXIT_CHAR: raise InterruptedError
        # NOTE(review): all() over an empty string is True, so bare Enter
        # reaches int("") and raises ValueError — TODO guard against "".
        if all( x in string.digits for x in c ):
            m = int( c )
            if m <= MAX_NODES:
                input_info[ "num_nodes" ] = m
                break
        print( "entrada invalida, digite novamente")
    print()
    # Valid edge-count range: a tree at minimum, fully connected at maximum.
    min_edges = m - 1 # A tree, Basicaly
    max_edges = m**2 - m # Fully connected
    print("digite a quantidade de arestas")
    print("maximo -> {}".format( max_edges ) )
    print("minimo -> {}".format( min_edges ) )
    while True:
        c = input()
        if c == EXIT_CHAR: raise InterruptedError
        if all( x in string.digits for x in c ):
            m = int( c )
            if min_edges <= m <= max_edges:
                input_info[ "num_edges" ] = m
                break
        print( "entrada invalida, digite novamente")
    print()
def custom_build( ):
    """Interactively read graph edges into input_info["nodes"/"edges"].

    One edge per line in the form "a b w" (node names, integer weight);
    "-1" ends the list, "*" aborts with InterruptedError.

    BUG FIXES vs. the original:
    * a malformed line was reported but then unpacked anyway (ValueError on
      wrong field count, or silently stored junk) — now re-prompts instead;
    * `s[-1] not in string.digits` is a substring test that rejected any
      multi-digit weight such as "10" — replaced with str.isdigit().
    """
    print( "digite os vertices do grafo" )
    print( 'formato: m m i' )
    print( "m -> minuscula")
    print( "i -> inteiro" )
    print( "digite -1 se acabou")
    input_info[ "nodes" ] = set()
    input_info[ "edges" ] = dict()
    while True:
        tup = input().rstrip()
        if tup == END_CHAR:
            break
        elif tup == EXIT_CHAR: raise InterruptedError
        s = tup.split()
        if len( s ) != 3 or not s[-1].isdigit():
            print( "entrada invalida, digite novamente")
            continue  # skip the malformed line instead of unpacking it
        a , b , c = s
        input_info[ "edges" ][ ( a , b ) ] = int( c )
        input_info[ "nodes" ].add( a )
        input_info[ "nodes" ].add( b )
    print()
def char_choice( ch , nome ):
    """Validate a flag character and its value, then record the choice.

    *ch* must be a key of CHAR_TERMS and the uppercased *nome* must be one
    of VALID_CHOICES for that term; otherwise ValueError is raised.
    """
    if ch not in CHAR_TERMS:
        raise ValueError
    term = CHAR_TERMS[ ch ]
    choice = nome.upper()
    if choice not in VALID_CHOICES[ term ]:
        raise ValueError
    input_info[ term ] = choice
def main( args ):
    """Parse CLI flags (e.g. "-a prim -b random") and run the chosen builder.

    Flags come in (-X value) pairs, dispatched through char_choice.

    BUG FIXES vs. the original:
    * a token not starting with '-' left `i` unchanged, looping forever —
      it now raises ValueError;
    * input_info["build"] raised KeyError when -b was not supplied —
      .get() now falls back to the custom builder.
    """
    i = 0
    while i < len( args ):
        m = args[i]
        if m[ 0 ] == '-':
            char_choice( m[1] , args[ i + 1 ] )
            i += 2
        else:
            raise ValueError( "unexpected argument: " + m )
    build_fun = custom_build
    if input_info.get( "build" ) == "RANDOM":
        build_fun = random_build
    build_fun()
    print( *input_info.items() , sep = "\n")

if __name__ == "__main__":
    main( sys.argv[ 1: ] )
|
{"/monitor_with_animation_test.py": ["/kruskal_monitor.py", "/prim_monitor.py"], "/monitor_test.py": ["/kruskal_monitor.py", "/prim_monitor.py"]}
|
20,303
|
LucasBR96/MST-ANIMATION
|
refs/heads/main
|
/monitor_test.py
|
import kruskal_monitor as krus
import prim_monitor as prim
# Algorithm selectors understood by solution_generator.
KRUSKAL = 0
PRIM = 1

def solution_generator( V , E , algo ):
    """Yield the chosen MST monitor's state tuple after each phase step."""
    monitor = prim if algo == PRIM else krus
    monitor._init( V , E )
    while monitor._next():
        yield monitor.get_variables()
def pretty_vars( mst_vars ):
    """Render a monitor state tuple (edge_status, current_edge, Va, Ea) as
    a human-readable block of text."""
    edge_status, current_edge, Va, Ea = mst_vars
    parts = []
    parts.append("edge_status = {}".format(edge_status))
    parts.append("current_edge = {} {}".format(*current_edge))
    parts.append("nodes in tree: ")
    parts.append("\t" + ' '.join(Va))
    parts.append("edges in tree:")
    for u, w in Ea:
        parts.append("\t" + "{} {}".format(u, w))
    return "\n".join(parts) + "\n"
def main():
    """CLI driver: pick an algorithm, read an edge list from stdin, then
    step the monitor, printing state after each <Enter>.

    Edge format: "a b w" per line, terminated by "-1". Prompts in Portuguese.
    """
    print( "escolha o algoritimo:" )
    print( "0 - kruskal" )
    print( "1 - prim" )
    n = int( input() )
    print()
    print( "digite os vertices do grafo" )
    print( 'formato: m m i' )
    print( "m -> minuscula")
    print( "i -> inteiro" )
    print( "digite -1 se acabou")
    E = dict()
    V = set()
    while True:
        tup = input().rstrip()
        if tup == "-1":
            break
        a , b , m = tup.split()
        E[ ( a , b ) ] = int( m )
        V.add( a )
        V.add( b )
    # Each iteration blocks on a keypress before printing the next state.
    for x in solution_generator( V , E , n ):
        input()
        print( pretty_vars( x ) )
main()
|
{"/monitor_with_animation_test.py": ["/kruskal_monitor.py", "/prim_monitor.py"], "/monitor_test.py": ["/kruskal_monitor.py", "/prim_monitor.py"]}
|
20,304
|
LucasBR96/MST-ANIMATION
|
refs/heads/main
|
/kruskal_monitor.py
|
#GLOBAL VARIABLE VISIBLE BY FILE ONLY ------------------------------------------------
# State-machine phases: each _next() call executes exactly one phase.
END = -1
SELECT = 0
CONSIDER = 1
UPDATE = 2
global_status = SELECT
# Edges sorted by weight (filled by _init).
E_prime = []
# node -> component label; Kruskal's union-find, implemented by relabelling.
T = dict()
#GLOBAL VARIABLE VISIBLE BY OUTSIDERS------------------------------------------------
# Status of the edge currently under consideration.
CONSIDERED = 0
REJECTED = 1
ACCEPTED = 2
edge_status = CONSIDERED
current_edge = ( -1 , -1 )
# Nodes / edges accepted into the spanning tree so far.
Va = set()
Ea = set()
# Total number of edges and the index of the edge being processed.
N = 0
pos = 0
#MONITOR FUNCTIONS------------------------------------------------------------------
def _select_fun():
    # SELECT phase: pick the next cheapest edge and mark it "considered".
    global current_edge, edge_status, global_status
    current_edge = E_prime[ pos ]
    edge_status = CONSIDERED
    global_status = CONSIDER
def _consider_fun( ):
    # CONSIDER phase: accept the edge iff its endpoints belong to different
    # components (T maps node -> component label).
    global edge_status, global_status
    ( x , y ) = current_edge
    r1 = T[ x ]
    r2 = T[ y ]
    edge_status = REJECTED
    if r1 != r2:
        edge_status = ACCEPTED
    global_status = UPDATE
def _update_fun( ):
    # UPDATE phase: on acceptance add the edge to the tree, then merge the
    # two components by relabelling everything in x's component to y's.
    global Va, Ea, current_edge, edge_status, T, global_status
    if edge_status == ACCEPTED:
        ( x , y ) = current_edge
        Va.add( x )
        Va.add( y )
        Ea.add( ( x , y ) )
        n = T[ x ]
        for a in T:
            if T[ a ] == n: T[ a ] = T[ y ]
    global pos , N
    pos = pos + 1
    # Finished once every edge has been examined.
    global_status = SELECT if pos < N else END
def _init( V , E ):
    # Sort the edges by weight and give every node its own component label.
    global E_prime , N
    E_prime = [ tup for tup in E ]
    E_prime.sort( key = lambda x : E[ x ] )
    N = len( E_prime )
    global T
    T = { v:i for i , v in enumerate( V ) }
def _next( ):
    # Advance the monitor by exactly one phase; returns False once done.
    if global_status == END:
        return False
    if global_status == SELECT:
        _select_fun()
    elif global_status == CONSIDER:
        _consider_fun()
    elif global_status == UPDATE:
        _update_fun()
    return True
def get_variables():
    # Snapshot for the UI; the sets are copied so callers cannot mutate state.
    return( edge_status , current_edge , Va.copy() , Ea.copy() )
if __name__ == "__main__":
    # Manual smoke test: step through a small fixed graph, printing the
    # monitor state after each <Enter>.
    V = set( ["a" , "b" , "c" , "d", "e" ] )
    E = {
        ('a','b'):2,
        ('a','c'):3,
        ('a','d'):4,
        ('c','d'):1,
        ('b','d'):2,
        ('d','e'):7,
        ('c','e'):3,
        ('a','e'):2
    }
    _init( V , E )
    while _next():
        input()
        t = get_variables()
        print( "-"*25 )
        print( *t , sep = "\n" )
    pass
|
{"/monitor_with_animation_test.py": ["/kruskal_monitor.py", "/prim_monitor.py"], "/monitor_test.py": ["/kruskal_monitor.py", "/prim_monitor.py"]}
|
20,305
|
LucasBR96/MST-ANIMATION
|
refs/heads/main
|
/prim_monitor.py
|
from collections import deque
#GLOBAL VARIABLE VISIBLE BY FILE ONLY ------------------------------------------------
# State-machine phases: each _next() call executes exactly one phase.
END = -1
SELECT = 0
CONSIDER = 1
UPDATE = 2
global_status = SELECT
# Frontier of candidate edges, kept sorted by weight (merged via intercal).
Possible_neighbors = deque([])
# node -> list of incident edges, cheapest first (built by _init).
Adj_lst = dict()
# edge -> weight, as supplied to _init.
E_val = dict()
#GLOBAL VARIABLE VISIBLE BY OUTSIDERS------------------------------------------------
# Status of the edge currently under consideration.
CONSIDERED = 0
REJECTED = 1
ACCEPTED = 2
edge_status = CONSIDERED
current_edge = ( -1 , -1 )
# Nodes / edges accepted into the spanning tree so far.
Va = set()
Ea = set()
#AUXILIARY FUNCTIONS ---------------------------------------------------------------
def intercal( arr1 , arr2 , foo ):
    """Merge two sequences that are each sorted by key *foo* into one
    sorted list (ties favour arr2's element, matching the original).

    Works on any indexable sequence (deques included), so elements are
    fetched by position rather than sliced.
    """
    merged = []
    i = j = 0
    len1, len2 = len(arr1), len(arr2)
    while i < len1 and j < len2:
        if foo(arr1[i]) < foo(arr2[j]):
            merged.append(arr1[i])
            i += 1
        else:
            merged.append(arr2[j])
            j += 1
    while i < len1:
        merged.append(arr1[i])
        i += 1
    while j < len2:
        merged.append(arr2[j])
        j += 1
    return merged
#MONITOR FUNCTIONS------------------------------------------------------------------
def _select_fun():
    # SELECT phase: take the cheapest frontier edge and mark it "considered".
    global Possible_neighbors , current_edge, edge_status , global_status
    current_edge = Possible_neighbors.popleft()
    edge_status = CONSIDERED
    global_status = CONSIDER
def _consider_fun():
    # CONSIDER phase: accept iff exactly one endpoint is already in the tree
    # (the xor rejects edges with both or neither endpoint inside).
    global edge_status, global_status
    a , b = current_edge
    r1 = a in Va
    r2 = b in Va
    edge_status = ACCEPTED if r1^r2 else REJECTED
    global_status = UPDATE
def _update_fun():
    # UPDATE phase: on acceptance add the new node, then merge its incident
    # edges into the weight-sorted frontier; an empty frontier ends the run.
    global global_status
    if edge_status == ACCEPTED:
        global Ea, Va
        Ea.add( current_edge )
        ( a , b ) = current_edge
        # y is whichever endpoint was outside the tree.
        y = a if b in Va else b
        Va.add( y )
        global Adj_lst, E_val, Possible_neighbors
        new_edges = [ tup for tup in Adj_lst[ y ] if tup != ( a , b ) ]
        Possible_neighbors = deque( intercal( Possible_neighbors , new_edges , lambda x: E_val[ x ] ) )
    global_status = END if len( Possible_neighbors ) == 0 else SELECT
def _init( V , E ):
    """Prepare the Prim monitor state: record weights, build per-node
    adjacency lists (edges sorted cheapest first), and seed the frontier
    from an arbitrary start node.

    *V* is accepted for interface compatibility but not used.

    BUG FIX: the original seeded the frontier with the for-loop variable
    `a` after the loop ended — a name leak that picked the first endpoint
    of the last (heaviest) edge, and raised NameError when E was empty.
    The same start node is now chosen explicitly, and an empty edge set
    simply leaves the frontier empty.
    """
    global E_val, Adj_lst, Possible_neighbors, Va, Ea
    E_val = E
    edges_by_weight = sorted(E, key=lambda tup: E[tup])
    for edge in edges_by_weight:
        ( a , b ) = edge
        Adj_lst[ a ] = Adj_lst.get( a , [] ) + [ edge ]
        Adj_lst[ b ] = Adj_lst.get( b , [] ) + [ edge ]
    if edges_by_weight:
        # Same start node the leaked loop variable used to select.
        start = edges_by_weight[-1][0]
        Possible_neighbors.extend( Adj_lst[ start ] )
        Va.add( start )
def get_variables():
    # Snapshot for the UI; the sets are copied so callers cannot mutate state.
    return( edge_status , current_edge , Va.copy() , Ea.copy() )
def _next():
    # Advance the monitor by exactly one phase; returns False once done.
    if global_status == END:
        return False
    if global_status == SELECT:
        _select_fun()
    elif global_status == CONSIDER:
        _consider_fun()
    elif global_status == UPDATE:
        _update_fun()
    return True
if __name__ == "__main__":
    # Manual smoke test on a fixed graph. V is passed empty because _init
    # derives everything it needs from the edge map.
    # E = dict()
    E = {
        ('a','b') :1 ,
        ('a', 'd'): 2,
        ('a', 'i'): 7,
        ('b', 'c'): 3,
        ('b', 'd'): 5,
        ('c', 'd'): 3,
        ('c', 'e'): 2,
        ('d', 'i'): 1,
        ('d', 'e'): 2,
        ('e', 'f'): 3,
        ('e', 'g'): 4,
        ('e', 'h'): 2,
        ('f', 'g'): 8,
        ('g', 'h'): 2,
        ('h', 'i'): 10
    }
    V = set()
    _init( V , E )
    while _next():
        # input()
        print()
        t = get_variables()
        print( "-"*25 )
        print( *t , sep = "\n" )
    pass
|
{"/monitor_with_animation_test.py": ["/kruskal_monitor.py", "/prim_monitor.py"], "/monitor_test.py": ["/kruskal_monitor.py", "/prim_monitor.py"]}
|
20,323
|
cms-sw/ib-scheduler
|
refs/heads/master
|
/buildRequestAPI.py
|
#!/usr/bin/env python
import ws_sso_content_reader
# Endpoint of the git-collector build-requests service.
DEFAULT_TC_URL = "https://eulisse.web.cern.ch/eulisse/cgi-bin/git-collector/buildrequests"

def setTCBaseURL(url):
    """Override the git-collector endpoint used by call().

    BUG FIX: without the `global` declaration the original only bound a
    local name, so the override was silently lost.
    """
    global DEFAULT_TC_URL
    DEFAULT_TC_URL = url

def call(method, obj, **kwds):
    """SSO-authenticated REST call against the git-collector.

    method -- "GET", "POST", "PATCH" or "DELETE" (anything else returns None)
    obj    -- resource path appended to the base URL
    kwds   -- query parameters (GET) or JSON payload (other verbs)
    Returns the decoded JSON response.

    BUG FIX: the original referenced undefined names (urlencode, loads,
    dumps, join, tcBaseURL). They are imported locally here (mirroring
    autoIB.py) and the module-level DEFAULT_TC_URL serves as the base URL.
    """
    from urllib import urlencode  # Python 2, consistent with the rest of the repo
    from json import loads, dumps
    from os.path import join
    if method == "GET":
        opts = urlencode(kwds)
        return loads(ws_sso_content_reader.getContent(join(DEFAULT_TC_URL, obj) + "?" + opts, None, method))
    elif method in ["POST", "PATCH", "DELETE"]:
        opts = dumps(kwds)
        return loads(ws_sso_content_reader.getContent(join(DEFAULT_TC_URL, obj), opts, method))
|
{"/buildRequestAPI.py": ["/ws_sso_content_reader.py"]}
|
20,324
|
cms-sw/ib-scheduler
|
refs/heads/master
|
/setup.py
|
#!/usr/bin/env python
from distutils.core import setup
# Register the scheduler's modules and entry script with distutils.
setup(name='IB Scheduler',
      version='1.0',
      description='CMS IB Utilities',
      author='CMS Collaboration',
      # BUG FIX: the address previously contained a doubled "@@".
      author_email='hn-cms-sw-develtools@cern.ch',
      url='http://cmssdt.cern.ch',
      py_modules=["tagCollectorAPI",
                  "ws_sso_content_reader",
                  "all_json",
                  "Lock"],
      scripts=['autoIB.py']
      )
|
{"/buildRequestAPI.py": ["/ws_sso_content_reader.py"]}
|
20,325
|
cms-sw/ib-scheduler
|
refs/heads/master
|
/autoCreateIb.py
|
#!/usr/bin/env python
# A simple script which creates IBs in git.
from commands import getstatusoutput
from optparse import OptionParser
from datetime import datetime, timedelta
from time import strftime
import re
def expandDates(s):
    """Expand week-parity placeholders, then strftime the remainder.

    @TW/@NW/@PW become this/next/previous ISO-week number modulo 2
    ("0" or "1"), used to alternate between two weekly repositories.
    """
    today = datetime.today()

    def week_parity(day_offset):
        # %W week-of-year of (today + offset days), reduced modulo 2.
        return str(int((today + timedelta(days=day_offset)).strftime("%W")) % 2)

    expanded = s.replace("@TW", week_parity(0))
    expanded = expanded.replace("@NW", week_parity(7))
    expanded = expanded.replace("@PW", week_parity(-7))
    return strftime(expanded)
def format(s, **kwds):
    """Old-style %-interpolation of keyword args into *s*.

    NOTE: shadows the builtin format() within this module.
    """
    result = s % kwds
    return result
def tagRelease(tag, branch, timestamp):
    """Tag the CMSSW commit on *branch* nearest before *timestamp* and push.

    timestamp -- "YYYY-MM-DD-HHMM"; split into a day and an HH:MM for
                 `git rev-list --before`.
    The clone/tag/push happens inside a throwaway mktemp directory which
    is removed afterwards. Errors are only printed, not raised.
    """
    (day, t) = timestamp.rsplit("-", 1)
    hour = t[0:2] + ":" + t[2:4]
    cmd = format("set -e;"
                 "TEMP=`mktemp -d`;"
                 "if [ -d /afs/cern.ch/cms/slc5_amd64_gcc472/external/git/1.8.3.1/etc/profile.d/init.sh ]; then"
                 "  source /afs/cern.ch/cms/slc5_amd64_gcc472/external/git/1.8.3.1/etc/profile.d/init.sh;"
                 "fi;"
                 "git clone $REFERENCE -b %(branch)s git@github.com:cms-sw/cmssw.git $TEMP/cmssw;"
                 "cd $TEMP/cmssw;"
                 "git tag %(tag)s `git rev-list -n 1 --before='%(day)s %(hour)s' %(branch)s`;"
                 "git push origin --tags;"
                 "rm -rf $TEMP",
                 day=day,
                 hour=hour,
                 branch=branch,
                 tag=tag)
    err, out = getstatusoutput(cmd)
    if err:
        print "Error while executing command:"
        print cmd
        print out
if __name__ == "__main__":
    # CLI: one positional tag (with optional @TW/@NW/@PW placeholders);
    # the release branch and timestamp are derived from it when omitted.
    parser = OptionParser()
    parser.add_option("-b", "--base", help="The release branch to use for this.", default=None, dest="base")
    parser.add_option("-D", "--date", help="Use this timestamp for the tag.", default=None, dest="timestamp")
    opts, args = parser.parse_args()
    if len(args) == 0:
        parser.error("You need to specify a tag")
    if len(args) > 1:
        parser.error("Too many tags")
    release = expandDates(args[0])
    if not opts.base:
        # Derive the branch: CMSSW_<major>_<minor>... -> CMSSW_<major>_<minor>_X
        m = re.match("(CMSSW_[0-9]+_[0-9]+).*", release)
        if not m:
            parser.error("Could not determine the release branch, please provide one with -b, --base")
        opts.base = m.group(1) + "_X"
    if opts.timestamp:
        opts.timestamp = expandDates(opts.timestamp)
    else:
        # Fall back to the YYYY-MM-DD-HHMM suffix embedded in IB names.
        m = re.match("CMSSW_[0-9]+_[0-9]+_.*?([0-9]{4}-[0-9]{2}-[0-9]{2}-[0-9]{4})$", release)
        if not m:
            parser.error("Could not determine date from release name. Please specify it via -D")
        opts.timestamp = m.group(1)
    tagRelease(release, opts.base, opts.timestamp)
|
{"/buildRequestAPI.py": ["/ws_sso_content_reader.py"]}
|
20,326
|
cms-sw/ib-scheduler
|
refs/heads/master
|
/all_json.py
|
# Apparently there are many ways to import json, depending on the python
# version. This should make sure you get one.
# Preference order: stdlib json (2.6+), the very old json API that exposed
# read/write, and finally the third-party simplejson package.
try:
    from json import loads
    from json import dumps
except:
    try:
        from json import read as loads
        from json import write as dumps
    except:
        from simplejson import loads
        from simplejson import dumps
|
{"/buildRequestAPI.py": ["/ws_sso_content_reader.py"]}
|
20,327
|
cms-sw/ib-scheduler
|
refs/heads/master
|
/ws_sso_content_reader.py
|
#!/usr/bin/env python
###Description: The tool reads cern web services behind SSO using user certificates
import os, urllib, urllib2, httplib, cookielib, sys, HTMLParser, re
from optparse import OptionParser
from os.path import expanduser, dirname, realpath
from logging import debug, error, warning, DEBUG
import logging
# Default grid-certificate locations, overridable via setDefaultCertificate.
DEFAULT_CERT_PATH="~/.globus/usercert.pem"
DEFAULT_KEY_PATH="~/.globus/userkey.pem"

def setDefaultCertificate(cert, key):
    """Point the module at a different user certificate / key pair.

    Must be called before HTTPSClientAuthHandler is constructed, since the
    handler snapshots the paths in its __init__.

    BUG FIX: the original assigned to local variables, so the defaults were
    never actually changed; the `global` declaration makes it effective.
    """
    global DEFAULT_CERT_PATH, DEFAULT_KEY_PATH
    DEFAULT_CERT_PATH = cert
    DEFAULT_KEY_PATH = key
class HTTPSClientAuthHandler(urllib2.HTTPSHandler):
    """urllib2 HTTPS handler authenticating with the user's grid
    certificate/key (TLS client-certificate auth)."""
    def __init__(self):
        urllib2.HTTPSHandler.__init__(self)
        # Paths are resolved here once, so setDefaultCertificate must run
        # before this handler is constructed.
        self.key = realpath(expanduser(DEFAULT_KEY_PATH))
        self.cert = realpath(expanduser(DEFAULT_CERT_PATH))
    def https_open(self, req):
        return self.do_open(self.getConnection, req)
    def getConnection(self, host, timeout=300):
        # timeout is accepted for interface compatibility but not forwarded.
        return httplib.HTTPSConnection(host, key_file=self.key, cert_file=self.cert)
def _getResponse(opener, url, data=None, method="GET"):
    """Issue one request through *opener*.

    *data*, when given, is sent as the request body; *method* overrides the
    HTTP verb. Response metadata is logged at debug level.
    """
    request = urllib2.Request(url)
    if data:
        request.add_data(data)
    if method != "GET":
        request.get_method = lambda : method
    response = opener.open(request)
    debug("Code: %s\n" % response.code)
    debug("Headers: %s\n" % response.headers)
    debug("Msg: %s\n" % response.msg)
    debug("Url: %s\n" % response.url)
    return response
def getSSOCookie(opener, target_url, cookie):
    """Walk the CERN SSO login redirects so that *opener*'s cookie jar ends
    up holding a valid session cookie for *target_url*.

    Raises Exception when the expected ADFS login form or its hidden
    fields cannot be found in the returned page.
    """
    opener.addheaders = [('User-agent', 'curl-sso-certificate/0.0.2')] #in sync with cern-get-sso-cookie tool
    # For some reason before one needed to have a parent url. Now this does not seem to be the case anymore...
    #parentUrl = "/".join(target_url.split("/", 4)[0:5]) + "/"
    parentUrl = target_url
    # NOTE(review): leftover debug print.
    print parentUrl
    url = urllib2.unquote(_getResponse(opener, parentUrl).url)
    content = _getResponse(opener, url).read()
    # The login page contains a form whose action is the ADFS endpoint.
    ret = re.search('<form .+? action="(.+?)">', content)
    if ret == None:
        raise Exception("error: The page doesn't have the form with adfs url, check 'User-agent' header")
    url = urllib2.unquote(ret.group(1))
    h = HTMLParser.HTMLParser()
    post_data_local = []
    # Collect the hidden SAML fields and re-post them to finish the login.
    for match in re.finditer('input type="hidden" name="([^"]*)" value="([^"]*)"', content):
        post_data_local += [(match.group(1), h.unescape(match.group(2)))]
    if not post_data_local:
        raise Exception("error: The page doesn't have the form with security attributes, check 'User-agent' header")
    _getResponse(opener, url, urllib.urlencode(post_data_local)).read()
def getContent(target_url, post_data=None, method="GET"):
    """Fetch *target_url* from behind CERN SSO and return the response body.

    Acquires an SSO cookie first, performs the real request, and always
    attempts a logout afterwards so server-side sessions don't pile up.
    """
    # NOTE(review): these two locals are computed but never used.
    cert_path = expanduser(DEFAULT_CERT_PATH)
    key_path = expanduser(DEFAULT_KEY_PATH)
    cookie = cookielib.CookieJar()
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookie), HTTPSClientAuthHandler())
    debug("The return page is sso login page, will request cookie.")
    hasCookie = False
    # if the access gave an exception, try to get a cookie
    try:
        getSSOCookie(opener, target_url, cookie)
        hasCookie = True
        result = _getResponse(opener, target_url, post_data, method).read()
    finally:
        # Best-effort logout; failure here must not mask the real result.
        if hasCookie:
            try:
                _getResponse(opener, "https://login.cern.ch/adfs/ls/?wa=wsignout1.0").read()
            except:
                error("Error, could not logout correctly from server")
    return result
if __name__ == "__main__":
    # CLI wrapper: fetch a single SSO-protected URL and print the body.
    # "-p -" reads the POST payload from stdin.
    parser = OptionParser(usage="%prog [-d(ebug)] -o(ut) COOKIE_FILENAME -c(cert) CERN-PEM -k(ey) CERT-KEY -u(rl) URL")
    parser.add_option("-d", "--debug", dest="debug", help="Enable pycurl debugging. Prints to data and headers to stderr.", action="store_true", default=False)
    parser.add_option("-p", "--postdata", dest="postdata", help="Data to be sent as post request", action="store", default=None)
    parser.add_option("-m", "--method", dest="method", help="Method to be used for the request", action="store", default="GET")
    parser.add_option("-c", "--cert", dest="cert_path", help="Absolute path to cert file.", action="store", default=DEFAULT_CERT_PATH)
    parser.add_option("-k", "--key", dest="key_path", help="Absolute path to key file.", action="store", default=DEFAULT_KEY_PATH)
    (opts, args) = parser.parse_args()
    if not len(args) == 1:
        parser.error("Please specify a URL")
    url = args[0]
    if opts.debug:
        logging.getLogger().setLevel(DEBUG)
    if opts.postdata == "-":
        opts.postdata = sys.stdin.read()
    try:
        setDefaultCertificate(opts.cert_path, opts.key_path)
        content = getContent(url, opts.postdata, opts.method)
    # HTTP errors are reported but still yield an (empty) print below.
    except urllib2.HTTPError, e:
        print e
        content = ""
    print content
|
{"/buildRequestAPI.py": ["/ws_sso_content_reader.py"]}
|
20,328
|
cms-sw/ib-scheduler
|
refs/heads/master
|
/autoIB.py
|
#!/usr/bin/env python
# This script allows you to execute various misc test to automate IB building
# steps, in particular:
#
# - Reset the weekly repository.
# - Build and upload externals in the weekly repository.
# - Build and upload ibs in the weekly repository.
#
from optparse import OptionParser
import buildRequestAPI as api
import sys, os, socket
from urllib2 import urlopen
from urllib import urlencode
import xml.parsers.expat
from commands import getstatusoutput
from getpass import getuser
from time import strftime
from os.path import abspath, join, dirname, exists, expanduser
import re
from Lock import Lock
from datetime import datetime, timedelta
import ws_sso_content_reader
# Make sibling modules importable no matter where the script is run from.
scriptPath = os.path.dirname( os.path.abspath(sys.argv[0]) )
if scriptPath not in sys.path:
    sys.path.append(scriptPath)
from all_json import loads, dumps
# Endpoint of the build-requests service.
DEFAULT_API_URL = "https://cmsgit.web.cern.ch/cmsgit/buildrequests"
def setTCUrl(url):
    # Override the endpoint; `global` makes the override stick (unlike the
    # broken setTCBaseURL in buildRequestAPI.py, which lacks it).
    global DEFAULT_API_URL
    DEFAULT_API_URL = url
def call(obj, method, **kwds):
    """SSO-authenticated REST call against DEFAULT_API_URL.

    GET encodes kwds as a query string; POST/PATCH/DELETE send them as a
    JSON body. The JSON response body is decoded before returning.
    NOTE(review): an unsupported *method* leaves `result` unbound and the
    final loads(result) raises NameError. Argument order here is
    (obj, method), the reverse of buildRequestAPI.call.
    """
    obj = str(obj).strip("/")
    print obj,":", method
    print kwds
    if method == "GET":
        opts = urlencode(kwds)
        result = ws_sso_content_reader.getContent(join(DEFAULT_API_URL, obj) + "?" + opts, None, method)
    elif method in ["POST", "PATCH", "DELETE"]:
        opts = dumps(kwds)
        result = ws_sso_content_reader.getContent(join(DEFAULT_API_URL, obj), opts, method)
    print result
    return loads(result)
try:
    # Python >= 2.5: hashlib provides sha1.
    from hashlib import sha1 as sha
    def hash(s):
        # Hex SHA-1 digest of s. NOTE: shadows the builtin hash().
        return sha(s).hexdigest()
except ImportError:
    # Very old interpreters fall back to the deprecated sha module.
    import sha
    def hash(s):
        return sha.new(s).hexdigest()
def overloaded(maxLoad):
    """Return True when the machine's load average exceeds *maxLoad*.

    Parses the last whitespace-separated field of `uptime` (the 15-minute
    load average). Best effort: if the command fails, the host is assumed
    to be not overloaded.
    """
    status, output = getstatusoutput("uptime | sed -e 's/^.* //'")
    if status:
        return False
    return float(output) > float(maxLoad)
# Expand week-parity placeholders: @TW = this week % 2, @NW = next week % 2,
# @PW = previous week % 2. Everything else is passed through strftime, so the
# two weekly repositories can alternate automatically.
def expandDates(s):
    today = datetime.today()
    parity = lambda offset: str(int((today + timedelta(days=offset)).strftime("%W")) % 2)
    expanded = (s.replace("@TW", parity(0))
                 .replace("@NW", parity(7))
                 .replace("@PW", parity(-7)))
    return strftime(expanded)
def expandRelease(s, release):
    """Substitute @RELEASE and @QUEUE placeholders in *s*.

    The queue is derived from the release name: CMSSW_<maj>_<min>... maps
    to CMSSW_<maj>_<min>_X.
    """
    queue = re.sub("(CMSSW_[0-9]+_[0-9]+).*", "\\1_X", release)
    return s.replace("@RELEASE", release).replace("@QUEUE", queue)
# Strip characters that could allow execution of unwanted shell commands.
# Keeps alphanumerics plus "_,:./-", then collapses "./" into "." so relative
# path tricks are neutralised. None/empty input yields "".
def sanitize(s):
    if not s:
        return ""
    cleaned = re.sub("[^0-9a-zA-Z_,:./-]", "", s)
    return re.sub("[.]/", ".", cleaned)
def format(s, **kwds):
    # Interpolate keyword arguments into the %(name)s template s.
    # NOTE: shadows the builtin format().
    return s % kwds
def die(s):
    # Print the message and abort the whole script with exit status 1.
    print s
    sys.exit(1)
# URL template of the per-release-queue config.map file in the cmsdist
# repository; the %s is the release queue, e.g. CMSSW_7_1_X.
EXTERNAL_INFO_URL="https://raw.github.com/cms-sw/cmsdist/IB/%s/stable/config.map"
# Get external information from github.
# See http://cms-sw.github.io/cmsdist/
# for the format of the config.map file.
def getExternalsTags(release_queue, architecture):
    """Look up the CMSDIST / PKGTOOLS tags for (release_queue, architecture).

    Downloads the config.map file for the given release queue from github
    and scans it for the line matching the requested SCRAM_ARCH.  Dies
    (terminates the script) when the file cannot be fetched, is malformed,
    or contains no complete entry for the architecture.

    Returns a dict with the "PKGTOOLS" and "CMSDIST" tag names.
    """
    # Get the mapping between architecture and release
    url = EXTERNAL_INFO_URL % release_queue
    try:
        data = urlopen(url).read()
    except Exception:
        # Narrowed from a bare `except:` so that KeyboardInterrupt /
        # SystemExit still propagate.
        die("Unable to find CMSDIST information for release queue %s." % release_queue)
    # Each non-empty line is a ";"-separated list of KEY=VALUE pairs.
    lines = [x.strip().split(";") for x in data.split("\n") if x.strip()]
    archInfo = {}
    for line in lines:
        parts = dict(x.split("=") for x in line)
        if "SCRAM_ARCH" not in parts:
            die("Bad file format for config.map")
        if parts["SCRAM_ARCH"] == architecture:
            archInfo = dict(parts)
            break
    if not archInfo.get("CMSDIST_TAG", None) or not archInfo.get("PKGTOOLS_TAG", None):
        die(format("Could not find architecture %(architecture)s for release series %(release_queue)s.\n"
                   "Please update `config.map' file in the CMSDIST branch IB/%(release_queue)s/stable",
                   release_queue=release_queue,
                   architecture=architecture))
    return {"PKGTOOLS": archInfo["PKGTOOLS_TAG"],
            "CMSDIST": archInfo["CMSDIST_TAG"]}
def process():
    """Claim the next pending build request from the tag collector and
    execute it on this machine: parse options, bail out when locked,
    overloaded or in maintenance, pick the first matching Pending task,
    mark it Running, drive cmsBuild via a generated shell script, upload
    the result, sync logs, record Completed/Failed and finally schedule
    any continuations declared in the payload."""
    # Get the first task from the list
    # Check if we know what to do
    # Mark it as started
    # Start doing it
    parser = OptionParser(usage="%prog process [options]")
    parser.add_option("--match-arch", metavar="REGEX", dest="matchArch", help="Limit architectures to those matching REGEX", default=".*")
    parser.add_option("--match-release", metavar="REGEX", dest="matchRelease", help="Limit releases to those matching REGEX", default=".*")
    parser.add_option("--work-dir", "--top-dir", metavar="PATH", dest="workdir", help="Work dir where processing happens", default=None)
    parser.add_option("--jobs", "-j", type="int", metavar="N", dest="jobs", help="Number of parallel building threads", default=1)
    parser.add_option("--builders", type="int", metavar="N", dest="builders", help="Number of packages built in parallel", default=1)
    parser.add_option("--debug", metavar="PATH", dest="debug", help="Print out what's happening", action="store_true", default=False)
    parser.add_option("--dry-run", "-n", metavar="BOOL", dest="dryRun", help="Do not execute", action="store_true", default=False)
    parser.add_option("--api-url", metavar="URL", dest="apiUrl", help="Specify API endpoint URL", default=DEFAULT_API_URL)
    parser.add_option("--max-load", type="int", metavar="LOAD", dest="maxLoad", help="Do not execute if average last 15 minutes load > LOAD", default=8)
    opts, args = parser.parse_args()
    setTCUrl(opts.apiUrl)
    if not opts.workdir:
        print "Please specify a workdir"
        sys.exit(1)
    # Machine-wide maintenance switch: refuse to run while it exists.
    if exists("/etc/iss.nologin"):
        print "/etc/iss.nologin found. Not doing anything and waiting for machine out of maintainance mode."
        sys.exit(1)
    opts.workdir = abspath(opts.workdir)
    thisPath=dirname(__file__)
    getstatusoutput(format(
        "%(here)s/syncLogs.py %(workdir)s",
        here=thisPath,
        workdir=opts.workdir))
    # One build at a time per work dir; the lock expires after 12 hours.
    lockPath = join(opts.workdir, "cms", ".cmsLock")
    lock = Lock(lockPath, True, 60*60*12)
    if not lock:
        if opts.debug:
            print "Lock found in %s" % lockPath
        sys.exit(1)
    lock.__del__()
    if overloaded(opts.maxLoad):
        print "Current load exceeds maximum allowed of %s." % opts.maxLoad
        sys.exit(1)
    tasks = call("/", "GET",
                 release_match=opts.matchRelease,
                 architecture_match=opts.matchArch,
                 state="Pending")
    print tasks
    if not len(tasks):
        if opts.debug:
            print "Nothing to be done which matches release %s and architecture %s" % (opts.matchArch, opts.matchRelease)
        sys.exit(1)
    # Look up for a hostname-filter option in the payload and if it is there,
    # make sure we match it.
    runnableTask = None
    for task in tasks:
        if not "payload" in task:
            continue
        if re.match(task["payload"].get("hostnameFilter", ".*"), socket.gethostname()):
            runnableTask = task
            break
    if not runnableTask:
        print "Nothing to be done on this machine."
        sys.exit(1)
    # Default payload options.
    payload = {"debug": False}
    payload.update(runnableTask["payload"])
    # We can now specify tags in the format repository:tag to pick up branches
    # from different people.
    payload["pkgtools_remote"] = "cms-sw"
    payload["cmsdist_remote"] = "cms-sw"
    if ":" in payload["PKGTOOLS"]:
        payload["pkgtools_remote"], payload["PKGTOOLS"] = payload["PKGTOOLS"].split(":", 1)
    if ":" in payload["CMSDIST"]:
        payload["cmsdist_remote"], payload["CMSDIST"] = payload["CMSDIST"].split(":", 1)
    if opts.dryRun:
        print "Dry run. Not building"
        sys.exit(1)
    # Claim the task: publish where it runs and flip its state to Running.
    ok = call(runnableTask["id"], "PATCH",
              url="http://cmssdt.cern.ch/SDT/tc-ib-logs/%s/log.%s.html" % (socket.gethostname(), runnableTask["id"]),
              machine=socket.gethostname(),
              pid=os.getpid(),
              state="Running")
    if not ok:
        print "Could not change request %s state to building" % runnableTask["id"]
        sys.exit(1)
    # Build the package.
    # We gracefully handle any exception (broken pipe, ctrl-c, SIGKILL)
    # by failing the request if they happen. We also always cat
    # the log for this build in a global log file.
    log = ""
    getstatusoutput(format(
        "echo 'Log not sync-ed yet' > %(workdir)s/log.%(task_id)s;\n"
        "%(here)s/syncLogs.py %(workdir)s",
        task_id=runnableTask["id"],
        here=thisPath,
        workdir=opts.workdir))
    try:
        print "Building..."
        # The whole build is one shell script: clone CMSDIST/PKGTOOLS,
        # patch the spec files with the release names, run cmsBuild
        # build + upload, then print install instructions and the
        # "AUTOIB SUCCESS" marker that is grepped for below.
        error, log = getstatusoutput(format("set -e ;\n"
            "mkdir -p %(workdir)s/%(task_id)s ;\n"
            "export CMS_PATH=%(workdir)s/cms ;\n"
            "cd %(workdir)s ;\n"
            "( echo 'Building %(package)s using %(cmsdistRemote)s:%(cmsdistTag)s';\n"
            " rm -rf %(task_id)s;\n"
            " git clone git://github.com/%(cmsdistRemote)s/cmsdist.git %(task_id)s/CMSDIST || git clone https://:@git.cern.ch/kerberos/CMSDIST.git %(task_id)s/CMSDIST;\n"
            " pushd %(task_id)s/CMSDIST; git checkout %(cmsdistTag)s; popd;\n"
            " PKGTOOLS_TAG=\"`echo %(pkgtoolsTag)s | sed -e's/\\(V[0-9]*-[0-9]*\\).*/\\1-XX/'`\";\n"
            " git clone git://github.com/%(pkgtoolsRemote)s/pkgtools.git %(task_id)s/PKGTOOLS || git clone https://:@git.cern.ch/kerberos/PKGTOOLS.git %(task_id)s/PKGTOOLS;\n"
            " pushd %(task_id)s/PKGTOOLS; git checkout $PKGTOOLS_TAG; popd;\n"
            " echo \"### RPM cms dummy `date +%%s`\n%%prep\n%%build\n%%install\n\" > %(task_id)s/CMSDIST/dummy.spec ;\n"
            " set -x ;\n"
            " rm -rf %(workdir)s/cms %(workdir)s/b ;\n"
            " perl -p -i -e 's/### RPM cms cmssw.*/### RPM cms cmssw %(base_release_name)s/' %(task_id)s/CMSDIST/cmssw.spec ;\n"
            " perl -p -i -e 's/### RPM cms cmssw-ib .*/### RPM cms cmssw-ib %(base_release_name)s/' %(task_id)s/CMSDIST/cmssw-ib.spec ;\n"
            " perl -p -i -e 's/### RPM cms cmssw-qa .*/### RPM cms cmssw-qa %(base_release_name)s/' %(task_id)s/CMSDIST/cmssw-qa.spec ;\n"
            " perl -p -i -e 's/### RPM cms cmssw-validation .*/### RPM cms cmssw-validation %(base_release_name)s/' %(task_id)s/CMSDIST/cmssw-validation.spec ;\n"
            " perl -p -i -e 's/### RPM cms cmssw-patch.*/### RPM cms cmssw-patch %(real_release_name)s/' %(task_id)s/CMSDIST/cmssw-patch.spec ;\n"
            " %(workdir)s/%(task_id)s/PKGTOOLS/cmsBuild %(debug)s --new-scheduler --cmsdist %(workdir)s/%(task_id)s/CMSDIST %(ignoreErrors)s --builders %(builders)s -j %(jobs)s --repository %(repository)s --architecture %(architecture)s --work-dir %(workdir)s/cms build %(package)s ;\n"
            " %(workdir)s/%(task_id)s/PKGTOOLS/cmsBuild %(debug)s --new-scheduler --cmsdist %(workdir)s/%(task_id)s/CMSDIST --repository %(repository)s --upload-tmp-repository %(tmpRepository)s %(syncBack)s --architecture %(architecture)s --work-dir %(workdir)s/cms upload %(package)s ;\n"
            " PKG_BUILD=`find %(workdir)s/cms/RPMS/%(architecture)s -name \"*%(package)s*\"| sed -e's|.*/||g;s|-1-1.*||g'`;\n"
            " set +x ;\n"
            " echo Build completed. you can now install the package built by doing: ;\n"
            " echo \"wget http://cmsrep.cern.ch/cmssw/cms/bootstrap.sh\" ;\n"
            " echo \"sh -x ./bootstrap.sh setup -path w -arch %(architecture)s -r %(repository)s >& bootstrap_%(architecture)s.log \";\n"
            " echo \"(source w/%(architecture)s/external/apt/*/etc/profile.d/init.sh ; apt-get install $PKG_BUILD )\" ;\n"
            " echo AUTOIB SUCCESS) 2>&1 | tee %(workdir)s/log.%(task_id)s",
            workdir=opts.workdir,
            debug=payload["debug"] == True and "--debug" or "",
            cmsdistTag=sanitize(payload["CMSDIST"]),
            pkgtoolsTag=sanitize(payload["PKGTOOLS"]),
            cmsdistRemote=sanitize(payload["cmsdist_remote"]),
            pkgtoolsRemote=sanitize(payload["pkgtools_remote"]),
            architecture=sanitize(runnableTask["architecture"]),
            release_name=sanitize(re.sub("_[A-Z]+_X", "_X", runnableTask["release"])),
            base_release_name=re.sub("_[^_]*patch[0-9]*$", "", sanitize(payload["release"])),
            real_release_name=sanitize(payload["release"]),
            package=sanitize(payload["package"]),
            repository=sanitize(payload["repository"]),
            syncBack=payload["syncBack"] == True and "--sync-back" or "",
            ignoreErrors=payload["ignoreErrors"] == True and "-k" or "",
            tmpRepository=sanitize(payload["tmpRepository"]),
            task_id=runnableTask["id"],
            jobs=opts.jobs,
            builders=opts.builders))
        getstatusoutput(format("echo 'Task %(task_id)s completed successfully.' >> %(workdir)s/log.%(task_id)s",
                               workdir=opts.workdir,
                               task_id=runnableTask["id"]))
    except Exception, e:
        # Ctrl-C / broken pipe / anything else: record the interruption in
        # the per-task log and fall through to the failure handling below.
        log = open(format("%(workdir)s/log.%(task_id)s", workdir=opts.workdir, task_id=runnableTask["id"])).read()
        log += "\nInterrupted externally."
        log += str(e)
        getstatusoutput(format("echo 'Interrupted externally' >> %(workdir)s/log.%(task_id)s",
                               workdir=opts.workdir,
                               task_id=runnableTask["id"]))
    # Append the per-task log to the global log and publish both.
    error, saveLog = getstatusoutput(format("set -e ;\n"
        "echo '#### Log file for %(task_id)s' >> %(workdir)s/log ;\n"
        "cat %(workdir)s/log.%(task_id)s >> %(workdir)s/log",
        workdir=opts.workdir,
        task_id=runnableTask["id"]))
    getstatusoutput("%s/syncLogs.py %s" % (thisPath, opts.workdir))
    # Success is detected by the marker echoed at the end of the script.
    if not "AUTOIB SUCCESS" in log:
        call(runnableTask["id"], "PATCH",
             state="Failed",
             url="http://cmssdt.cern.ch/SDT/tc-ib-logs/%s/log.%s.html" % (socket.gethostname(), runnableTask["id"] ))
        print log
        print saveLog
        sys.exit(1)
    call(runnableTask["id"], "PATCH",
         state="Completed",
         url="http://cmssdt.cern.ch/SDT/tc-ib-logs/%s/log.%s.html" % (socket.gethostname(), runnableTask["id"]))
    # Here we are done processing the job. Now schedule continuations.
    if not "continuations" in payload:
        sys.exit(0)
    continuationsSpec = payload["continuations"] or ""
    continuations = [x for x in continuationsSpec.split(";")]
    if len(continuations) == 0:
        sys.exit(0)
    if len(continuations) != 1:
        print "WARNING: multiple continuations not supported yet"
    if opts.debug:
        print continuations
    # Each continuation entry is "package:architecture".
    nextTasks = [p.split(":", 1) for p in continuations[0].split(",") if ":" in p]
    for package, architecture in nextTasks:
        options = {}
        # Notice that continuations will not support overriding CMSDIST and
        # PKGTOOLS completely.
        #
        # We do not want that because there could be cases where
        # the first step is done for one architecture, while the second
        # step is done for another.
        options["PKGTOOLS"] = sanitize(payload["PKGTOOLS"])
        options["CMSDIST"] = sanitize(payload["CMSDIST"])
        # For the moment do not support continuations of continuations.
        options["continuations"] = ""
        options.update(getExternalsTags(expandRelease("@QUEUE", payload["release"]), architecture))
        call("", "POST",
             release=sanitize(payload["release"]),
             architecture=sanitize(architecture),
             repository=sanitize(payload["repository"]),
             tmpRepository=sanitize(payload["tmpRepository"]),
             syncBack=payload["syncBack"],
             debug=payload["debug"],
             ignoreErrors=payload["ignoreErrors"],
             package=sanitize(package),
             PKGTOOLS=options["PKGTOOLS"],
             CMSDIST=options["CMSDIST"],
             continuations=options["continuations"]
            )
def listTasks():
    """List build requests in a given state, one per line, using a
    printf-like format where %i/%p/%a/%r/%s expand to id, package,
    architecture, release and state respectively."""
    # Get the first task from the list
    # Check if we know what to do
    # Mark it as started
    # Start doing it
    parser = OptionParser(usage="%prog list [options]")
    parser.add_option("--match-arch", metavar="REGEX", dest="matchArch", help="Limit architectures to those matching REGEX", default=".*")
    parser.add_option("--match-release", metavar="REGEX", dest="matchRelease", help="Limit releases to those matching REGEX", default=".*")
    parser.add_option("--state", metavar="Running,Pending,Completed,Failed", dest="state", help="Show requests in the given state", default="Running")
    parser.add_option("--format", metavar="FORMAT", dest="format", help="Output format", default="%i: %p %r %a")
    parser.add_option("--api-url", metavar="URL", dest="apiUrl", help="Specify API endpoint", default=DEFAULT_API_URL)
    opts, args = parser.parse_args()
    setTCUrl(opts.apiUrl)
    results = call("/", "GET",
                   release_match=opts.matchRelease,
                   architecture_match=opts.matchArch,
                   state=opts.state)
    if not results:
        sys.exit(1)
    # Translate the %x shorthands into %(key)s placeholders.
    replacements = [("i", "id"),
                    ("p", "package"),
                    ("a", "architecture"),
                    ("r", "release"),
                    ("s", "state")]
    opts.format = opts.format.replace("%", "%%")
    for x, y in replacements:
        opts.format = opts.format.replace("%%" + x, "%(" + y + ")s")
    # Flatten the payload into each result so its keys are addressable too.
    results = [x.update(x["payload"]) or x for x in results]
    print "\n".join([opts.format % x for x in results])
# This will request to build a package in the repository.
# - Setup a few parameters for the request
# - Get PKGTOOLS and CMSDIST from TC if they are not passed
# - Create the request.
def requestBuildPackage():
parser = OptionParser()
parser.add_option("--release", "-r", metavar="RELEASE", dest="release", help="Specify release.", default=None)
parser.add_option("--architecture", "-a", metavar="ARCHITECTURE", dest="architecture", help="Specify architecture", default=None)
parser.add_option("--repository", "-d", metavar="REPOSITORY NAME", dest="repository", help="Specify repository to use for bootstrap", default="cms")
parser.add_option("--upload-tmp-repository", metavar="REPOSITORY SUFFIX", dest="tmpRepository", help="Specify repository suffix to use for upload", default=getuser())
parser.add_option("--pkgtools", metavar="TAG", dest="pkgtools", help="Specify PKGTOOLS version to use. You can specify <user>:<tag> to try out a non official tag.", default=None)
parser.add_option("--cmsdist", metavar="TAG", dest="cmsdist", help="Specify CMSDIST tag branch to use. You can specify <user>:<tag> to try out a non official tag.", default=None)
parser.add_option("--hostname-filter", metavar="HOSTNAME-REGEX", dest="hostnameFilter", help="Specify a given regular expression which must be matched by the hostname of the builder machine.", default=".*")
parser.add_option("--sync-back", metavar="BOOL", dest="syncBack", action="store_true", help="Specify whether or not to sync back the repository after upload", default=False)
parser.add_option("--ignore-compilation-errors", "-k", metavar="BOOL", dest="ignoreErrors", help="When supported by the spec, ignores compilation errors and still packages the available build products", action="store_true", default=False)
parser.add_option("--api-url", metavar="url", dest="apiUrl", help="Specify the url for the API", default=DEFAULT_API_URL)
parser.add_option("--continuations", metavar="SPEC", dest="continuations", help="Specify a comma separated list of task:architecture which need to be scheduled after if this task succeeds", default="")
parser.add_option("--debug", metavar="BOOL", dest="debug", help="Add cmsbuild debug information", action="store_true", default=False)
parser.add_option("--dry-run", "-n", metavar="BOOL", dest="dryRun", help="Do not push the request to tag collector", action="store_true", default=False)
opts, args = parser.parse_args()
if len(args) != 2:
parser.error("You need to specify a package")
setTCUrl(opts.apiUrl)
if not opts.repository:
parser.error("Please specify a repository")
if not opts.release:
parser.error("Please specify a release")
if not opts.architecture:
parser.error("Please specify an architecture")
options = {}
options["hostnameFilter"] = opts.hostnameFilter
options["release"] = expandDates(opts.release)
options["release_queue"] = expandRelease("@QUEUE", options["release"])
options["architecture"] = opts.architecture
options["repository"] = expandRelease(expandDates(opts.repository).replace("@ARCH", options["architecture"]), options["release"])
options["tmpRepository"] = expandDates(opts.tmpRepository)
options["syncBack"] = opts.syncBack
options["package"] = expandDates(args[1])
options["continuations"] = opts.continuations.replace("@ARCH", options["architecture"])
options["ignoreErrors"] = opts.ignoreErrors
options["debug"] = opts.debug
if opts.cmsdist and opts.continuations:
print format("WARNING: you have specified --pkgtools to overwrite the PKGTOOLS tag coming from tag collector.\n"
"However, this will happen only for %(package)s, continuations will still fetch those from the tagcolletor.", package=options["package"])
if opts.cmsdist and opts.continuations:
print format("WARNING: you have specified --cmsdist to overwrite the PKGTOOLS tag coming from tag collector.\n"
"However, this will happen only for %(package)s, continuations will still fetch those from the tagcolletor.", package=options["package"])
# Get the mapping between architecture and release
options.update(getExternalsTags(options["release_queue"], options["architecture"]))
if opts.pkgtools:
options["PKGTOOLS"] = sanitize(expandRelease(opts.pkgtools, options["release"]).replace("@ARCH", options["architecture"]))
if opts.cmsdist:
options["CMSDIST"] = sanitize(expandRelease(opts.cmsdist, options["release"]).replace("@ARCH", options["architecture"]))
if not options.get("CMSDIST"):
print "Unable to find CMSDIST for releases %s on %s" % (options["release"], options["architecture"])
sys.exit(1)
if not options.get("PKGTOOLS"):
print "Unable to find PKGTOOLS for releases %s on %s" % (options["release"], options["architecture"])
sys.exit(1)
if opts.dryRun:
print "Dry run specified, the request would look like:\n %s" % str(options)
sys.exit(1)
call("", "POST", **options)
def cancel():
parser = OptionParser(usage="%prog cancel <request-id>")
parser.add_option("--api-url", metavar="url", dest="apiUrl", help="Specify the url for the API", default=DEFAULT_API_URL)
opts, args = parser.parse_args()
setTCUrl(opts.apiUrl)
if not len(args):
print "Please specify a request id."
ok = call(args[1], "DELETE")
if not ok:
print "Error while cancelling request %s" % args[1]
sys.exit(1)
def reschedule():
parser = OptionParser(usage="%prog reschedule <request-id>")
parser.add_option("--api-url", metavar="url", dest="apiUrl", help="Specify the url for the API", default=DEFAULT_API_URL)
opts, args = parser.parse_args()
setTCUrl(opts.apiUrl)
if not len(args):
print "Please specify a request id."
ok = call(args[1], "PATCH",
pid="",
machine="",
url="",
state="Pending")
if not ok:
print "Error while rescheduling request %s" % args[1]
sys.exit(1)
# Dispatch table mapping the command-line sub-command to its handler.
COMMANDS = {"process": process,
            "cancel": cancel,
            "list": listTasks,
            "request": requestBuildPackage,
            "reschedule": reschedule
           }
if __name__ == "__main__":
    # Force a predictable locale so parsed tool output is stable.
    os.environ["LANG"] = "C"
    # The sub-command is the first non-option argument.
    commands = [x for x in sys.argv[1:] if not x.startswith("-")]
    if len(commands) == 0 or not commands[0] in COMMANDS.keys():
        print "Usage: autoIB.py <command> [options]\n"
        print "Where <command> can be among the following:\n"
        print "\n".join(COMMANDS.keys())
        print "\nUse `autoIB.py <command> --help' to get more detailed help."
        sys.exit(1)
    command = commands[0]
    COMMANDS[command]()
|
{"/buildRequestAPI.py": ["/ws_sso_content_reader.py"]}
|
20,349
|
ozhar1248/trivia_game
|
refs/heads/main
|
/quiz_brain.py
|
class QuizBrain:
    """Runs a true/false quiz over a list of question objects."""

    def __init__(self, bank):
        self.question_bank = bank   # objects exposing .question / .answer
        self.question_number = 0    # index of the next question to ask
        self.score = 0              # correct answers so far

    def next_question(self):
        """Prompt the user with the current question and grade the reply."""
        current = self.question_bank[self.question_number]
        ans = input(f"Q.{self.question_number+1}: {current.question} (True / False): ")
        self.check_answer(ans, current.answer)
        self.question_number += 1

    def has_questions(self):
        """Return True while unasked questions remain."""
        return self.question_number < len(self.question_bank)

    def check_answer(self, user_ans, correct_ans):
        """Compare answers case-insensitively, report and update the score."""
        if user_ans.lower() == correct_ans.lower():
            self.score += 1
            print("Right!")
        else:
            print("Wrong!")
        print(f"The correct answer is {correct_ans}")
        print(f"Your current score is {self.score}/{len(self.question_bank)}\n")
|
{"/main.py": ["/quiz_brain.py"]}
|
20,350
|
ozhar1248/trivia_game
|
refs/heads/main
|
/main.py
|
from question import Question
from data import question_data
from quiz_brain import QuizBrain

# Build Question objects from the raw data entries.
question_bank = [Question(item["text"], item["answer"]) for item in question_data]

# Run the quiz until every question has been asked.
quiz = QuizBrain(question_bank)
while quiz.has_questions():
    quiz.next_question()

# Final grade as a percentage, rounded to two decimals.
grade = round(quiz.score / len(question_bank) * 100, 2)
print(f"You've completed the quiz\nYour final score is {grade}")
|
{"/main.py": ["/quiz_brain.py"]}
|
20,351
|
Eiyeron/telegram-bot-api
|
refs/heads/master
|
/models.py
|
# Using __dict__ and *args for compulsory args and **kwargs for optional ones.
class User(object):
    """Telegram user; attributes mirror the keys of the raw API dict."""
    def __init__(self, *args):
        try:
            self.__dict__ = args[0]
        except (IndexError, TypeError):
            # No data supplied, or it was not a dict: leave the
            # instance empty.  (Narrowed from a bare `except`.)
            pass
class GroupChat(object):
    """Telegram group chat; attributes mirror the keys of the raw API dict."""
    def __init__(self, *args):
        try:
            self.__dict__ = args[0]
        except (IndexError, TypeError):
            # No data supplied, or it was not a dict: leave the
            # instance empty.  (Narrowed from a bare `except`.)
            pass
# Todo? : Inheritance and create a File superclass
# for all file-related classes?
class PhotoSize:
    """One size variant of a photo."""
    def __init__(self, data):
        if not data:
            # Tolerate empty/None payloads: leave the instance blank.
            return
        for key in ("file_id", "width", "height"):
            setattr(self, key, data[key])
        # file_size is optional in the API; -1 marks "unknown".
        self.file_size = data.get("file_size", -1)
class Audio:
    """Audio file metadata; file_id, duration and mime_type are required."""
    def __init__(self, data):
        for key in ("file_id", "duration", "mime_type"):
            setattr(self, key, data[key])
        # Optional; -1 means the size was not reported.
        self.file_size = data.get("file_size", -1)
class Document:
    """Generic file attachment; only file_id is mandatory."""
    def __init__(self, data):
        self.file_id = data["file_id"]
        # The thumbnail attribute exists only when the API sent one.
        if 'thumb' in data:
            self.thumb = PhotoSize(data["thumb"])
        for key, fallback in (("file_name", ""),
                              ("mime_type", ""),
                              ("file_size", -1)):
            setattr(self, key, data.get(key, fallback))
class Sticker:
    """Sticker attachment; dimensions are mandatory, thumb/size optional."""
    def __init__(self, data):
        for key in ("file_id", "width", "height"):
            setattr(self, key, data[key])
        # The thumbnail attribute exists only when the API sent one.
        if 'thumb' in data:
            self.thumb = PhotoSize(data["thumb"])
        self.file_size = data.get("file_size", -1)
class Video:
    """Video attachment; id, dimensions and duration are mandatory."""
    def __init__(self, data):
        for key in ("file_id", "width", "height", "duration"):
            setattr(self, key, data[key])
        # The thumbnail attribute exists only when the API sent one.
        if 'thumb' in data:
            self.thumb = PhotoSize(data["thumb"])
        for key, fallback in (("mime_type", ""),
                              ("file_size", -1),
                              ("caption", "")):
            setattr(self, key, data.get(key, fallback))
class Contact:
    """Shared phone contact; last_name and user_id may be absent."""
    def __init__(self, data):
        for key in ("phone_number", "first_name"):
            setattr(self, key, data[key])
        self.last_name = data.get("last_name", "")
        self.user_id = data.get("user_id", "")
class Location:
    """Geographic point sent in a message."""
    def __init__(self, data):
        self.longitude, self.latitude = data["longitude"], data["latitude"]
class UserProfilePhotos:
    """Collection of a user's profile pictures.

    NOTE(review): each row is stored as a plain list of raw size dicts
    rather than being wrapped in PhotoSize objects — presumably
    intentional; confirm against callers.
    """
    def __init__(self, data):
        self.total_count = data["total_count"]
        self.photos = [list(row) for row in data["photos"]]
class ReplyKeyBoard(object):
    # Base class for reply_markup payloads; holds the API-wide optional
    # 'selective' flag (defaults to False).
    def __init__(self, **kwargs):
        self.selective = kwargs.get('selective', False)
class ReplyKeyboardMarkup(ReplyKeyBoard):
    """Custom-keyboard reply_markup payload.

    Bug fix: the resize flag was previously stored as the misspelled
    attribute 'reisze_keyboard', which the Telegram API would ignore
    when the object is serialized.
    """
    def __init__(self, keyboard, **kwargs):
        ReplyKeyBoard.__init__(self, **kwargs)
        self.keyboard = keyboard
        self.resize_keyboard = kwargs.get("resize_keyboard", False)
        self.one_time_keyboard = kwargs.get("one_time_keyboard", False)
class ReplyKeyboardHide(ReplyKeyBoard):
    # reply_markup payload asking clients to hide the custom keyboard.
    def __init__(self, **kwargs):
        ReplyKeyBoard.__init__(self, **kwargs)
        self.hide_keyboard = True
class ForceReply(ReplyKeyBoard):
    # reply_markup payload forcing the client into "reply to" mode.
    def __init__(self, **kwargs):
        ReplyKeyBoard.__init__(self, **kwargs)
        self.force_reply = True
# Message fields whose raw dict payload should be wrapped in the
# corresponding model class when a Message is built.
replace_dict = {'forward_from': User,
                'audio': Audio,
                'document': Document,
                'sticker': Sticker,
                'video': Video,
                'contact': Contact,
                'location': Location,
                'new_chat_participant': User,
                'left_chat_participant': User
                }
class Message(object):
    """Telegram message: wraps the raw API dict, converting nested
    structures into the model classes defined above."""
    def __init__(self, *args):
        message_dict = {}
        for attr, attr_value in args[0].items():
            if attr == 'from':
                # 'from' is a Python keyword, so expose it as from_user.
                message_dict['from_user'] = User(attr_value)
            elif attr == 'chat':
                # Finding if we have a GroupChat or an User
                if 'first_name' in attr_value:
                    message_dict[attr] = User(attr_value)
                elif 'title' in attr_value:
                    message_dict[attr] = GroupChat(attr_value)
            elif attr in replace_dict:
                # Fields with a known model class are wrapped directly.
                message_dict[attr] = replace_dict[attr](attr_value)
            elif attr == "reply_to_message":
                # Quoted messages are themselves full Message objects.
                message_dict[attr] = Message(attr_value)
            elif attr in ("photo", "new_chat_photo"):
                # Photos arrive as a list of size variants.
                photos = []
                for photo in attr_value:
                    photos.append(PhotoSize(photo))
                message_dict[attr] = photos
            else:
                # Plain scalar fields are kept as-is.
                message_dict[attr] = attr_value
        self.__dict__ = message_dict
|
{"/telegram.py": ["/models.py"]}
|
20,352
|
Eiyeron/telegram-bot-api
|
refs/heads/master
|
/telegram.py
|
import requests
import sys
from .models import Message
import json
class Telegram:
    """This class wraps the (almost) whole Telegram API and offers a
    handler-based update system to plug to the interface whatever functionality
    you want."""

    # Maps a field that may appear on an incoming Message to the handler
    # method name invoked when that field is present; "update" fires for
    # every message.
    handlerTypeCallback = {
        "update": "on_update",
        "forward_from": "on_forward",
        "reply_to_message": "on_reply",
        "text": "on_text",
        "audio": "on_audio",
        "document": "on_document",
        "photo": "on_photo",
        "sticker": "on_sticker",
        "video": "on_video",
        "contact": "on_contact",
        "location": "on_location",
        # Bug fix: callback name was misspelled "on_new_chat_carticipant".
        "new_chat_participant": "on_new_chat_participant",
        "left_chat_participant": "on_left_chat_participant",
        "new_chat_title": "on_new_chat_title",
        "new_chat_photo": "on_new_chat_photo",
        # Bug fix: the key was "delete_chat_Photo", which could never match
        # the lowercase attribute set from the API payload.
        "delete_chat_photo": "on_delete_chat_photo",
        "group_chat_created": "on_group_chat_created",
    }

    def __init__(self, api_url, token):
        """api_url: base API endpoint; token: bot access token appended
        to the URL when building requests."""
        self.api_url = api_url
        self.access_token = token
        self.loopingUpdateHandler = False
        self.lastID = 0      # next update_id offset for getUpdates
        self.handlers = []   # registered update handler objects

    def send_request(self, action, params=None, files=None):
        """Wraps the url building and sends the requst to Telegram's servers.
        Returns the processed data in JSON or a JSON object containing the
        error message."""
        # Bug fix: the defaults were the mutable literals {} / [], which
        # are shared across calls; use None sentinels instead.
        params = {} if params is None else params
        files = [] if files is None else files
        url = "{}{}/{}".format(self.api_url, self.access_token, action)
        r = requests.get(url, params=params, files=files)
        try:
            return r.json()
        except ValueError:
            print("There has been a parsing error on this message : {}"
                  .format(r.text))
            return {"ok": False,
                    "why": "Parsing Error",
                    "message": r.text}

    def send_file(self, chat_id, command, method, file_data,
                  reply_to_message_id="",
                  reply_markup=""):
        """Wraps the file sending process."""
        args = {"chat_id": chat_id,
                "reply_to_message_id": reply_to_message_id,
                "reply_markup": reply_markup}
        files = {}
        # Checking if it's a resend id.
        if isinstance(file_data, str):
            args[method] = file_data
        else:
            files[method] = file_data
        return self.send_request(command, args, files)

    def get_updates(self, offset=0, limit=100, timeout=0):
        """Using /getUpdates to poll updates from Telegram."""
        return self.send_request("getUpdates", {"offset": offset,
                                                "limit": limit,
                                                "timeout": timeout})

    def send_message(self, chat_id, text,
                     reply_to_message_id=None,
                     reply_markup=None):
        """Sends a text-only message to a chat/user."""
        params = {"chat_id": chat_id, "text": text}
        if reply_to_message_id is not None:
            params["reply_to_message_id"] = reply_to_message_id
        if reply_markup is not None:
            params["reply_markup"] = reply_markup
        return self.send_request("sendMessage", params)

    def send_keyboard_markup(self, chat_id, keyboard, message,
                             resize_keyboard=False,
                             one_time_keyboard=False,
                             selective=False):
        """Sends a message together with a custom-keyboard reply_markup."""
        reply_markup = {
            "keyboard": keyboard,
            "resize_keyboard": resize_keyboard,
            "one_time_keyboard": one_time_keyboard,
            "selective": selective}
        return self.send_message(chat_id, message, None,
                                 json.dumps(reply_markup,
                                            separators=(',', ':')))

    def forward_message(self, chat_id, from_chat_id, message_id):
        """Forwards a message from a chat to another chat."""
        return self.send_request("forwardMessage",
                                 {"chat_id": chat_id,
                                  "from_chat_id": from_chat_id,
                                  "message_id": message_id})

    def get_me(self):
        """Returns the basic infos about the bot. Good function for testing
        if communicating to Telegram works."""
        return self.send_request("getMe")

    def send_photo(self, chat_id, photo,
                   reply_to_message_id="", reply_markup=""):
        """Sends a photo the "quick way", a client will receive a smaller,
        compressed version of the original file. Prefer send_document if
        you need the original version to be sent."""
        return self.send_file(chat_id, "sendPhoto", "photo", photo,
                              reply_to_message_id, reply_markup)

    def send_audio(self, chat_id, audio,
                   reply_to_message_id="", reply_markup=""):
        """Sends an audio file."""
        return self.send_file(chat_id, "sendAudio", "audio", audio,
                              reply_to_message_id, reply_markup)

    def send_document(self, chat_id, document,
                      reply_to_message_id="", reply_markup=""):
        """Sends a document, whatever its filetype is. Perfect for sending
        pictures without affecting their quality/size, GIFs, or all the files
        you want."""
        return self.send_file(chat_id, "sendDocument", "document", document,
                              reply_to_message_id, reply_markup)

    def send_sticker(self, chat_id, sticker,
                     reply_to_message_id="", reply_markup=""):
        """Sends a sticker to the given chat. You have to find a way
        to know the sticker id before as no infos are given on them
        unless you were sent one."""
        return self.send_file(chat_id, "sendSticker", "sticker", sticker,
                              reply_to_message_id, reply_markup)

    def send_video(self, chat_id, video,
                   reply_to_message_id="", reply_markup=""):
        """Sends a video. Looks like Telegram's servers compress
        and scale down them. Prefer send_document if you need the
        original version to be sent."""
        return self.send_file(chat_id, "sendVideo", "video", video,
                              reply_to_message_id, reply_markup)

    def send_location(self, chat_id, latitude, longitude,
                      reply_to_message_id="", reply_markup=""):
        """Sends a location. The client will see a map frame with
        given location"""
        # Bug fix: the payload previously listed "reply_to_message_id"
        # twice, so reply_markup silently overwrote the reply id and was
        # itself never sent under its own key.
        return self.send_request("sendLocation",
                                 {"chat_id": chat_id,
                                  "latitude": latitude,
                                  "longitude": longitude,
                                  "reply_to_message_id": reply_to_message_id,
                                  "reply_markup": reply_markup})

    def add_handler(self, handler):
        """Adds a update handler to the current instance (at most once).

        Bug fix: the previous guard compared the literal string
        "callback" against the handler list and therefore never
        prevented duplicate registrations.
        """
        if handler not in self.handlers:
            self.handlers.append(handler)

    def remove_handler(self, callback, **kwargs):
        """Checks if the handlers exists and removes it."""
        if callback in self.handlers:
            self.handlers.remove(callback)

    def call_handlers(self, message):
        """Internal function to notifiy handlers based on their
        implemented entry points."""
        for handler in self.handlers:
            for k, v in self.handlerTypeCallback.items():
                # Fire when the message carries the field (or always for
                # "update") and the handler implements the callback.
                if (k == "update" or hasattr(message, k))\
                        and hasattr(handler, v):
                    try:
                        getattr(handler, v)(self, message)
                    except:
                        # A faulty handler must not break the poll loop.
                        print("""Oops, there has been a problem
                        with this handler : {}""".format(handler))
                        print(sys.exc_info())

    def process_updates(self):
        """Pools updates and dispatches them to the handlers."""
        self.loopingUpdateHandler = True
        while self.loopingUpdateHandler:
            notifications = self.get_updates(self.lastID)
            if notifications["ok"] is True:
                for notification in notifications['result']:
                    # Advance the offset past the highest update seen.
                    self.lastID = max(self.lastID, notification["update_id"])+1
                    message = Message(notification["message"])
                    self.call_handlers(message)
            else:
                print("Oops, something went bad : {}".format(notifications))
|
{"/telegram.py": ["/models.py"]}
|
20,357
|
phlax/pootle_vcs
|
refs/heads/master
|
/pootle_vcs/migrations/0006_auto_20150923_2212.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Renames ProjectVCS.pull_frequency to fetch_frequency to match the
    # fetch-based terminology used elsewhere in the app.
    dependencies = [
        ('pootle_vcs', '0005_projectvcs_push_frequency'),
    ]
    operations = [
        migrations.RenameField(
            model_name='projectvcs',
            old_name='pull_frequency',
            new_name='fetch_frequency',
        ),
    ]
|
{"/pootle_vcs/models.py": ["/pootle_vcs/__init__.py"], "/pootle_vcs/management/commands/__init__.py": ["/pootle_vcs/models.py"], "/pootle_vcs/files.py": ["/pootle_vcs/models.py"], "/pootle_vcs/plugins.py": ["/pootle_vcs/files.py", "/pootle_vcs/finder.py", "/pootle_vcs/models.py"], "/pootle_vcs/management/commands/vcs_commands/info.py": ["/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/management/commands/vcs.py": ["/pootle_vcs/models.py", "/pootle_vcs/management/commands/vcs_commands/info.py", "/pootle_vcs/management/commands/vcs_commands/fetch_translations.py", "/pootle_vcs/management/commands/vcs_commands/files.py", "/pootle_vcs/management/commands/vcs_commands/set_vcs.py", "/pootle_vcs/management/commands/vcs_commands/status.py"], "/pootle_vcs/management/commands/vcs_commands/set_vcs.py": ["/pootle_vcs/__init__.py", "/pootle_vcs/management/commands/__init__.py", "/pootle_vcs/models.py"], "/pootle_vcs/management/commands/vcs_commands/fetch_translations.py": ["/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/__init__.py": ["/pootle_vcs/plugins.py", "/pootle_vcs/files.py"], "/pootle_vcs/management/commands/vcs_commands/status.py": ["/pootle_vcs/models.py", "/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/management/commands/vcs_commands/files.py": ["/pootle_vcs/management/commands/__init__.py"]}
|
20,358
|
phlax/pootle_vcs
|
refs/heads/master
|
/pootle_vcs/models.py
|
from django.db import models
from pootle_project.models import Project
from pootle_store.models import Store
from . import plugins
class StoreVCS(models.Model):
    """Per-store VCS bookkeeping.

    Links a Pootle ``Store`` to its counterpart file in the project's
    version-controlled repository and records the last synchronised state.
    """

    # The Pootle store this record tracks; reverse accessor is store.vcs.
    store = models.ForeignKey(Store, related_name='vcs')
    # Pootle revision at the last sync; NULL until the first sync happens.
    last_sync_revision = models.IntegerField(blank=True, null=True)
    # VCS commit id at the last sync; NULL/empty until the first sync.
    last_sync_commit = models.CharField(max_length=32, blank=True, null=True)
    # Path of the corresponding file inside the repository.
    path = models.CharField(max_length=32)

    @property
    def vcs(self):
        """Return the ProjectVCS configured for this store's project.

        Note: performs a DB query (``.get()``) on every access.
        """
        return self.store.translation_project.project.vcs.get()

    @property
    def repository_file(self):
        """Return the plugin file object wrapping this store's repo file."""
        # Hoist the ``vcs`` property lookup: the original accessed it twice,
        # issuing two identical DB queries per call.
        vcs = self.vcs
        translation_project = self.store.translation_project
        parent_dir_names = [d.name for d in self.store.parent.trail()]
        return vcs.plugin.file_class(
            vcs,
            self.path,
            translation_project.language,
            self.store.name,
            parent_dir_names)
class ProjectVCS(models.Model):
    """VCS configuration for a Project, plus thin delegation to its plugin."""

    # Owning project; reverse accessor is project.vcs.
    project = models.ForeignKey(Project, related_name='vcs')
    # Remote repository location.
    url = models.URLField()
    # Key selecting which VCS plugin handles this project (e.g. backend name).
    vcs_type = models.CharField(max_length=32)
    enabled = models.BooleanField(default=True)
    # Frequencies for automatic fetch/push; 0 by default.
    fetch_frequency = models.IntegerField(default=0)
    push_frequency = models.IntegerField(default=0)
    # Name of the per-repository Pootle configuration file.
    pootle_config = models.CharField(max_length=32, default=".pootle.ini")

    @property
    def plugin(self):
        """Instantiate the plugin registered for ``vcs_type``, bound to self.

        NOTE(review): ``plugins`` comes from the package import; presumably a
        dict-like registry keyed by vcs_type -- confirm in pootle_vcs package.
        """
        return plugins[self.vcs_type](self)

    ###########################
    # VCS Plugin implementation
    def pull(self):
        """Update the local repository clone from the remote."""
        return self.plugin.pull()

    def get_latest_commit(self):
        """Return the most recent commit id known to the plugin."""
        return self.plugin.get_latest_commit()

    def fetch_translation_files(self):
        """Fetch translation files from the repository into Pootle."""
        return self.plugin.fetch_translation_files()

    def list_translation_files(self):
        """Return the plugin's collection of translation files."""
        return self.plugin.translation_files

    def pull_translation_files(self):
        """Pull translation file contents via the plugin."""
        return self.plugin.pull_translation_files()

    def read_config(self):
        """Read the repository's Pootle configuration via the plugin."""
        return self.plugin.read_config()

    def status(self):
        """Return the plugin's view of the sync status."""
        return self.plugin.status()
    # VCS Plugin implementation
    ###########################
|
{"/pootle_vcs/models.py": ["/pootle_vcs/__init__.py"], "/pootle_vcs/management/commands/__init__.py": ["/pootle_vcs/models.py"], "/pootle_vcs/files.py": ["/pootle_vcs/models.py"], "/pootle_vcs/plugins.py": ["/pootle_vcs/files.py", "/pootle_vcs/finder.py", "/pootle_vcs/models.py"], "/pootle_vcs/management/commands/vcs_commands/info.py": ["/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/management/commands/vcs.py": ["/pootle_vcs/models.py", "/pootle_vcs/management/commands/vcs_commands/info.py", "/pootle_vcs/management/commands/vcs_commands/fetch_translations.py", "/pootle_vcs/management/commands/vcs_commands/files.py", "/pootle_vcs/management/commands/vcs_commands/set_vcs.py", "/pootle_vcs/management/commands/vcs_commands/status.py"], "/pootle_vcs/management/commands/vcs_commands/set_vcs.py": ["/pootle_vcs/__init__.py", "/pootle_vcs/management/commands/__init__.py", "/pootle_vcs/models.py"], "/pootle_vcs/management/commands/vcs_commands/fetch_translations.py": ["/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/__init__.py": ["/pootle_vcs/plugins.py", "/pootle_vcs/files.py"], "/pootle_vcs/management/commands/vcs_commands/status.py": ["/pootle_vcs/models.py", "/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/management/commands/vcs_commands/files.py": ["/pootle_vcs/management/commands/__init__.py"]}
|
20,359
|
phlax/pootle_vcs
|
refs/heads/master
|
/pootle_vcs/migrations/0008_storevcs.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
# Auto-generated Django migration introducing the StoreVCS model.
class Migration(migrations.Migration):

    dependencies = [
        ('pootle_store', '0002_make_suggestion_user_not_null'),
        ('pootle_vcs', '0007_projectvcs_pootle_config'),
    ]

    operations = [
        migrations.CreateModel(
            name='StoreVCS',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('last_sync_revision', models.IntegerField(null=True, blank=True)),
                # NOTE(review): the current model declares last_sync_commit
                # with blank=True/null=True and also has a `path` field;
                # presumably a later migration adds those -- confirm the
                # migration history before assuming this is final schema.
                ('last_sync_commit', models.CharField(max_length=32)),
                ('store', models.ForeignKey(related_name='vcs', to='pootle_store.Store')),
            ],
            options={
            },
            bases=(models.Model,),
        ),
    ]
|
{"/pootle_vcs/models.py": ["/pootle_vcs/__init__.py"], "/pootle_vcs/management/commands/__init__.py": ["/pootle_vcs/models.py"], "/pootle_vcs/files.py": ["/pootle_vcs/models.py"], "/pootle_vcs/plugins.py": ["/pootle_vcs/files.py", "/pootle_vcs/finder.py", "/pootle_vcs/models.py"], "/pootle_vcs/management/commands/vcs_commands/info.py": ["/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/management/commands/vcs.py": ["/pootle_vcs/models.py", "/pootle_vcs/management/commands/vcs_commands/info.py", "/pootle_vcs/management/commands/vcs_commands/fetch_translations.py", "/pootle_vcs/management/commands/vcs_commands/files.py", "/pootle_vcs/management/commands/vcs_commands/set_vcs.py", "/pootle_vcs/management/commands/vcs_commands/status.py"], "/pootle_vcs/management/commands/vcs_commands/set_vcs.py": ["/pootle_vcs/__init__.py", "/pootle_vcs/management/commands/__init__.py", "/pootle_vcs/models.py"], "/pootle_vcs/management/commands/vcs_commands/fetch_translations.py": ["/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/__init__.py": ["/pootle_vcs/plugins.py", "/pootle_vcs/files.py"], "/pootle_vcs/management/commands/vcs_commands/status.py": ["/pootle_vcs/models.py", "/pootle_vcs/management/commands/__init__.py"], "/pootle_vcs/management/commands/vcs_commands/files.py": ["/pootle_vcs/management/commands/__init__.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.