index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
10,894
|
callidus/playbot
|
refs/heads/master
|
/playbot/bot.py
|
from __future__ import absolute_import
from __future__ import print_function
import irc.bot
import logging
import re
import ssl
import time
logger = logging.getLogger(__name__)
class PlayBot(irc.bot.SingleServerIRCBot):
    """Simple pluggable IRC bot.

    Commands (invoked as "<nick>: <cmd> ...") are registered callables;
    listeners are called for every public message not addressed to the bot.
    """

    def __init__(self, channels, nickname, password, server, port=6667,
                 force_ssl=False, server_password=None):
        if force_ssl or port == 6697:
            # NOTE(review): ssl.wrap_socket is deprecated (removed in 3.12);
            # migrate to ssl.SSLContext().wrap_socket when upgrading.
            # NOTE(review): `irc.connection` is used but only `irc.bot` is
            # imported -- this relies on irc.bot importing it as a side
            # effect; confirm or import it explicitly.
            factory = irc.connection.Factory(wrapper=ssl.wrap_socket)
            super(PlayBot, self).__init__(
                [(server, port, server_password)],
                nickname, nickname,
                connect_factory=factory)
        else:
            super(PlayBot, self).__init__(
                [(server, port, server_password)],
                nickname, nickname)
        self.commands = {}       # command name -> callable(bot, e, cmd, *args)
        self.listeners = []      # callables(bot, connection, e) -> reply or None
        self.channel_list = channels
        self.nickname = nickname
        self.password = password

    def register_command(self, name, obj):
        """Register callable *obj* to handle command *name*."""
        self.commands[name] = obj

    def register_listner(self, obj):
        """Register a passive listener.

        Historical misspelling kept for backward compatibility; prefer
        register_listener.
        """
        self.listeners.append(obj)

    # Correctly spelled alias for the misspelled public method above.
    register_listener = register_listner

    def on_nicknameinuse(self, c, e):
        """Recover from a nick collision by appending an underscore."""
        logger.info('Nick previously in use, recovering.')
        self.nickname = c.get_nickname() + "_"
        c.nick(self.nickname)
        time.sleep(1)
        logger.info('Nick previously in use, recovered.')

    def on_welcome(self, c, e):
        """Join all configured channels once the server greets us."""
        for channel in self.channel_list:
            c.join(channel)
            logger.info('Joined channel %s' % channel)
            time.sleep(0.5)

    def on_privmsg(self, c, e):
        # Reply target for a private message is the sender's nick
        # (e.source is "nick!user@host").
        e.target = re.sub("!.*", "", e.source)
        self.do_command(e)

    def on_pubmsg(self, c, e):
        """Dispatch addressed messages as commands, others to listeners."""
        if(e.arguments[0].lower().startswith(self.nickname.lower())):
            # Remove Name
            # NOTE(review): the character class lacks a space, so
            # "nick: cmd" leaves a leading blank -- do_command() strips it.
            e.arguments[0] = re.sub("^[\t:]*", "",
                                    e.arguments[0][len(self.nickname):])
            self.do_command(e)
        else:
            try:
                for listener in self.listeners:
                    msg = listener(self, c, e)
                    if msg is not None:
                        self.do_send(e.target, msg)
            except Exception as err:
                # logger.warn is deprecated; warning() is the supported name.
                logger.warning('Error in listener: %s', err)

    def on_dccmsg(self, c, e):
        c.privmsg("You said: " + e.arguments[0])

    def do_command(self, e):
        """Parse "cmd arg..." from the event and dispatch it."""
        msg = e.arguments[0].strip().split(" ")
        cmd = msg[0].lower()
        arg = msg[1:]
        if cmd == 'help':
            cmdStr = "commands: help " + " ".join(self.commands.keys())
            self.do_send(e.target, cmdStr)
        elif cmd in self.commands:
            c = self.commands[cmd]
            try:
                c(self, e, cmd, *arg)
            except Exception as err:
                logger.warning('Error in command: %s %s', str(cmd), err)
                self.do_send(e.target, "Huh?")
        else:
            nick = re.sub("!.*", "", e.source)  # Strip IP from nick
            c = self.connection
            c.notice(nick, "Not understood: " + cmd)

    def do_send(self, channel, msg):
        """Send *msg* to *channel*; reconnect on any delivery failure."""
        logger.info('Sending "%s" to %s' % (msg, channel))
        try:
            self.connection.privmsg(channel, msg)
            time.sleep(0.5)  # crude flood protection
        except Exception:
            logger.exception('Exception sending message:')
            self.reconnect()
|
{"/playbot/plugins/fortune/fortune.py": ["/playbot/plugins/fortune/__init__.py"], "/playbot/plugins/fortune/build_db.py": ["/playbot/plugins/fortune/__init__.py"]}
|
10,895
|
callidus/playbot
|
refs/heads/master
|
/playbot/plugins/link_peek.py
|
from __future__ import absolute_import
import re
from six.moves.urllib import request
def peek(bot, c, e):
    """Listener: if the message contains a URL, fetch it and report its title.

    Returns "Link peek: <title>" or None when there is no URL, or the page
    has no <title> tag.
    """
    msg = e.arguments[0].strip()
    found = re.search("(http[^ ]*)", msg)
    if found is None:
        return
    url = found.group(1)
    req = request.Request(url)
    response = request.urlopen(req)
    try:
        # NOTE(review): assumes windows-1252 pages; 'replace' keeps bytes
        # undefined in that codec from raising instead of crashing the bot.
        the_page = response.read().decode('windows-1252', errors='replace')
    finally:
        # Bug fix: the response was never closed, leaking the connection.
        response.close()
    # Bug fix: pages without a <title> previously raised AttributeError
    # on .group(1); return None instead.
    title = re.search("<title>([^<]*)</title>", the_page)
    if title is None:
        return
    return "Link peek: %s" % title.group(1)
|
{"/playbot/plugins/fortune/fortune.py": ["/playbot/plugins/fortune/__init__.py"], "/playbot/plugins/fortune/build_db.py": ["/playbot/plugins/fortune/__init__.py"]}
|
10,905
|
a1600012888/AdversarialTraining
|
refs/heads/master
|
/experiments/CIFAR10/trades.train/network.py
|
import config
from base_model.cifar_resnet18 import cifar_resnet18
def create_network():
    """Build the CIFAR-10 ResNet-18 used by this experiment."""
    return cifar_resnet18(num_class = 10, expansion = 1)
if __name__ == '__main__':
net = create_network()
print(net)
|
{"/lib/attack/__init__.py": ["/lib/attack/attack_base.py"]}
|
10,906
|
a1600012888/AdversarialTraining
|
refs/heads/master
|
/experiments/CIFAR10/cosine.adv.train/config.py
|
from easydict import EasyDict
import sys
import os
import argparse
import numpy as np
import torch
import torch.nn.functional as F
def add_path(path):
    """Append *path* to sys.path (with a log line) unless already present."""
    if path in sys.path:
        return
    print('Adding {}'.format(path))
    sys.path.append(path)
abs_current_path = os.path.realpath('./')
root_path = os.path.join('/', *abs_current_path.split(os.path.sep)[:-3])
lib_dir = os.path.join(root_path, 'lib')
add_path(lib_dir)
from training.config import TrainingConfigBase, SGDOptimizerMaker, \
PieceWiseConstantLrSchedulerMaker, IPGDAttackMethodMaker
class CosineClassificationLoss(torch.nn.modules.loss._Loss):
    """Classification loss: 1 - cosine_similarity(pred, one_hot(target)).

    reduction='mean' averages over the batch; any other value sums.
    """

    def __init__(self, class_num = 10, reduction = 'mean'):
        super(CosineClassificationLoss, self).__init__()
        self.class_num = class_num
        self.reduction = reduction
        self.cosine_similarity = torch.nn.CosineSimilarity()

    def forward(self, pred, target):
        # Scatter a 1 at each sample's target index.
        encoded = torch.zeros_like(pred)
        encoded[list(range(pred.size(0))), target] = 1
        dissimilarity = 1 - self.cosine_similarity(pred, encoded)
        if self.reduction == 'mean':
            return torch.mean(dissimilarity)
        return torch.sum(dissimilarity)
class TrainingConfing(TrainingConfigBase):
    # NOTE(review): class name is a typo of "TrainingConfig"; kept as-is
    # because module-level code instantiates it by this exact name.
    lib_dir = lib_dir
    num_epochs = 180
    val_interval = 10  # run validation every 10 epochs
    create_optimizer = SGDOptimizerMaker(lr =1e-1, momentum = 0.9, weight_decay = 1e-4)
    create_lr_scheduler = PieceWiseConstantLrSchedulerMaker(milestones = [70, 120, 150], gamma = 0.1)
    # Cosine loss defined above, instead of the usual cross-entropy.
    create_loss_function = CosineClassificationLoss
    # 10-step L-inf PGD (eps 8/255) for adversarial training.
    create_attack_method = \
        IPGDAttackMethodMaker(eps = 8/255.0, sigma = 2/255.0, nb_iters = 10, norm = np.inf,
                              mean = torch.tensor(np.array([0]).astype(np.float32)[np.newaxis, :, np.newaxis, np.newaxis]),
                              std = torch.tensor(np.array([1]).astype(np.float32)[np.newaxis, :, np.newaxis, np.newaxis]))
    # Stronger 20-step PGD for evaluation.
    create_evaluation_attack_method = \
        IPGDAttackMethodMaker(eps = 8/255.0, sigma = 2/255.0, nb_iters = 20, norm = np.inf,
                              mean = torch.tensor(np.array([0]).astype(np.float32)[np.newaxis, :, np.newaxis, np.newaxis]),
                              std = torch.tensor(np.array([1]).astype(np.float32)[np.newaxis, :, np.newaxis, np.newaxis]))
# Module-level singleton consumed via `from config import config` elsewhere.
config = TrainingConfing()
# About data
# C.inp_chn = 1
# C.num_class = 10
# NOTE(review): arguments are parsed at import time, so importing this
# module consumes sys.argv.
parser = argparse.ArgumentParser()
parser.add_argument('--resume', default=None, type=str, metavar='PATH',
                    help='path to latest checkpoint (default: none)')
parser.add_argument('-b', '--batch_size', default=256, type=int,
                    metavar='N', help='mini-batch size')
parser.add_argument('-d', type=int, default=7, help='Which gpu to use')
parser.add_argument('-freq', '--attack-interval', default=2, type = int,
                    help = 'Specify how many iterations between two batch of adv images')
parser.add_argument('--auto-continue', default=False, action = 'store_true',
                    help = 'Continue from the latest checkpoint')
args = parser.parse_args()
if __name__ == '__main__':
    pass
|
{"/lib/attack/__init__.py": ["/lib/attack/attack_base.py"]}
|
10,907
|
a1600012888/AdversarialTraining
|
refs/heads/master
|
/lib/attack/__init__.py
|
from .attack_base import clip_eta
|
{"/lib/attack/__init__.py": ["/lib/attack/attack_base.py"]}
|
10,908
|
a1600012888/AdversarialTraining
|
refs/heads/master
|
/experiments/CIFAR10/natural.train/eval.py
|
from config import config
from dataset import create_test_dataset
from network import create_network
from training.train import eval_one_epoch
from utils.misc import load_checkpoint
import argparse
import torch
import numpy as np
import os
# Command-line interface.
parser = argparse.ArgumentParser()
# Bug fix: '--resume' was passed twice to add_argument, registering the
# same option string twice on one action.
parser.add_argument('--resume', default='log/models/last.checkpoint',
                    type=str, metavar='PATH',
                    help='path to latest checkpoint (default:log/last.checkpoint)')
parser.add_argument('-d', type=int, default=0, help='Which gpu to use')
args = parser.parse_args()
DEVICE = torch.device('cuda:{}'.format(args.d))
torch.backends.cudnn.benchmark = True
net = create_network()
net.to(DEVICE)
ds_val = create_test_dataset(512)
AttackMethod = config.create_evaluation_attack_method(DEVICE)
# Load the checkpoint when present; otherwise the freshly initialised
# network is evaluated as-is.
if os.path.isfile(args.resume):
    load_checkpoint(args.resume, net)
print('Evaluating')
clean_acc, adv_acc = eval_one_epoch(net, ds_val, DEVICE, AttackMethod)
print('clean acc -- {}  adv acc -- {}'.format(clean_acc, adv_acc))
|
{"/lib/attack/__init__.py": ["/lib/attack/attack_base.py"]}
|
10,909
|
a1600012888/AdversarialTraining
|
refs/heads/master
|
/lib/training/config.py
|
from abc import ABCMeta, abstractproperty, abstractmethod
from typing import Tuple, List, Dict
import os
import sys
import torch
class TrainingConfigBase(metaclass=ABCMeta):
    '''
    Base class for training

    Subclasses supply lib_dir/num_epochs plus factory callables for the
    optimizer, LR scheduler, loss and (optionally) attack methods.
    '''
    # directory handling
    @property
    def abs_current_dir(self):
        # Experiments are launched from their own directory; paths below
        # are all relative to the process's cwd.
        return os.path.realpath('./')
    @property
    def log_dir(self):
        # Creates ./log on first access.
        if not os.path.exists('./log'):
            os.mkdir('./log')
        return os.path.join(self.abs_current_dir, 'log')
    @property
    def model_dir(self):
        # Creates ./log/models on first access (checkpoint destination).
        log_dir = self.log_dir
        model_dir = os.path.join(log_dir, 'models')
        #print(model_dir)
        if not os.path.exists(model_dir):
            os.mkdir(model_dir)
        return model_dir
    @abstractproperty
    def lib_dir(self):
        # Absolute path of the shared `lib` package root.
        pass
    # training setting
    @abstractproperty
    def num_epochs(self):
        pass
    @property
    def val_interval(self):
        '''
        Specify how many epochs between two validation steps
        Return <= 0 means no validation phase
        '''
        return 0
    @abstractmethod
    def create_optimizer(self, params) -> torch.optim.Optimizer:
        '''
        params (iterable): iterable of parameters to optimize or dicts defining
            parameter groups
        '''
        pass
    @abstractmethod
    def create_lr_scheduler(self, optimizer:torch.optim.Optimizer) -> torch.optim.lr_scheduler._LRScheduler:
        pass
    @abstractmethod
    def create_loss_function(self) -> torch.nn.modules.loss._Loss:
        pass
    def create_attack_method(self, *inputs):
        '''
        Perform adversarial training against xxx adversary
        Return None means natural training
        '''
        return None
    # Evaluation Setting
    def create_evaluation_attack_method(self, *inputs):
        '''
        evaluating the robustness of model against xxx adversary
        Return None means only measuring clean accuracy
        '''
        return None
class SGDOptimizerMaker(object):
    """Factory: remembers SGD hyper-parameters, builds an optimizer on call."""

    def __init__(self, lr = 0.1, momentum = 0.9, weight_decay = 1e-4):
        self.lr = lr
        self.momentum = momentum
        self.weight_decay = weight_decay

    def __call__(self, params):
        """Create a torch.optim.SGD over *params* with the stored settings."""
        return torch.optim.SGD(
            params,
            lr=self.lr,
            momentum=self.momentum,
            weight_decay=self.weight_decay,
        )
class PieceWiseConstantLrSchedulerMaker(object):
    """Factory for MultiStepLR schedulers with fixed milestones and gamma."""

    def __init__(self, milestones:List[int], gamma:float = 0.1):
        self.milestones = milestones
        self.gamma = gamma

    def __call__(self, optimizer):
        """Wrap *optimizer* in a MultiStepLR using the stored schedule."""
        return torch.optim.lr_scheduler.MultiStepLR(
            optimizer, milestones=self.milestones, gamma=self.gamma)
class IPGDAttackMethodMaker(object):
    # Factory: stores IPGD hyper-parameters and builds the attacker on call.
    def __init__(self, eps, sigma, nb_iters, norm, mean, std):
        '''
        :param eps: perturbation budget
        :param sigma: single step size
        :param nb_iters: number of PGD iterations
        :param norm: norm bounding the perturbation (1, 2 or np.inf)
        :param mean: normalisation mean tensor (broadcast over the batch)
        :param std: normalisation std tensor (broadcast over the batch)
        '''
        self.eps = eps
        self.sigma = sigma
        self.nb_iters = nb_iters
        self.norm = norm
        self.mean = mean
        self.std = std
    def __call__(self, DEVICE):
        # Make the package root importable before the lazy import below;
        # deferred so merely importing the config needs no sys.path setup.
        father_dir = os.path.join('/', *os.path.realpath(__file__).split(os.path.sep)[:-2])
        # print(father_dir)
        if not father_dir in sys.path:
            sys.path.append(father_dir)
        from attack.pgd import IPGD
        return IPGD(self.eps, self.sigma, self.nb_iters, self.norm, DEVICE, self.mean, self.std)
|
{"/lib/attack/__init__.py": ["/lib/attack/attack_base.py"]}
|
10,910
|
a1600012888/AdversarialTraining
|
refs/heads/master
|
/lib/attack/attack_base.py
|
import torch
import numpy as np
from abc import ABCMeta, abstractmethod, abstractproperty
class AttackBase(metaclass=ABCMeta):
    # Interface every attack implementation must satisfy.
    @abstractmethod
    def attack(self, net, inp, label, target = None):
        '''
        :param net: model under attack
        :param inp: batched images
        :param label: ground-truth labels for the batch
        :param target: specify the indexes of target class, None represents untargeted attack
        :return: batched adversarial images
        '''
        pass
    @abstractmethod
    def to(self, device):
        # Move any internal tensors/state to *device*.
        pass
def clip_eta(eta, norm, eps, DEVICE = torch.device('cuda:0')):
    """Project the perturbation *eta* into the eps-ball of the given norm.

    :param eta: Perturbation tensor (should be of size(N, C, H, W))
    :param norm: which norm. should be in [1, 2, np.inf]
    :param eps: epsilon, bound of the perturbation
    :param DEVICE: device for the scalar helper tensors
    :return: Projected perturbation
    """
    assert norm in [1, 2, np.inf], "norm should be in [1, 2, np.inf]"
    with torch.no_grad():
        if norm == np.inf:
            # L-inf: clamp each coordinate independently.
            bound = torch.tensor(eps).to(DEVICE)
            projected = torch.clamp(eta, -bound, bound)
        else:
            # L1/L2: rescale any per-sample perturbation whose norm
            # exceeds eps; leave smaller ones untouched.
            tiny = torch.tensor(1e-12).to(DEVICE)
            radius = torch.tensor(eps).to(DEVICE)
            unity = torch.tensor(1.0).to(DEVICE)
            per_sample = torch.norm(eta.reshape(eta.size(0), -1), p = norm, dim = -1, keepdim = False)
            per_sample = torch.max(per_sample, tiny)  # avoid division by zero
            per_sample = per_sample.reshape(-1, 1, 1, 1)
            projected = eta * torch.min(unity, radius / per_sample)
    return projected
def test_clip():
    """Ad-hoc GPU smoke test for clip_eta (requires CUDA)."""
    a = torch.rand((10, 3, 28, 28)).cuda()
    epss = [0.1, 0.5, 1]
    norms = [1, 2, np.inf]
    device = torch.device('cuda:0')
    for e, n in zip(epss, norms):
        print(e, n)
        # Bug fix: the DEVICE argument was previously the literal `True`,
        # which is not a torch.device, so every `.to(DEVICE)` inside
        # clip_eta received an invalid target.
        c = clip_eta(a, n, e, device)
        print(c)
if __name__ == '__main__':
test_clip()
|
{"/lib/attack/__init__.py": ["/lib/attack/attack_base.py"]}
|
10,911
|
a1600012888/AdversarialTraining
|
refs/heads/master
|
/lib/training/train.py
|
import os
import sys
father_dir = os.path.join('/', *os.path.realpath(__file__).split(os.path.sep)[:-2])
#print(father_dir)
if not father_dir in sys.path:
sys.path.append(father_dir)
from utils.misc import torch_accuracy, AvgMeter
from collections import OrderedDict
import torch
from tqdm import tqdm
def train_one_epoch(net, batch_generator, optimizer,
                    criterion, DEVICE=torch.device('cuda:0'),
                    descrip_str='Training', AttackMethod = None, adv_coef = 1.0):
    '''
    Train *net* for one pass over *batch_generator*.

    :param criterion: loss applied to both clean and adversarial predictions
    :param AttackMethod: the attack method, None represents natural training
    :param adv_coef: weight of the adversarial loss term in the total loss
    :return: None    #(clean_acc, adv_acc)
    '''
    net.train()
    pbar = tqdm(batch_generator)
    # Progress-bar stats; -1 means "not measured" (e.g. natural training).
    advacc = -1
    advloss = -1
    cleanacc = -1
    cleanloss = -1
    pbar.set_description(descrip_str)
    for i, (data, label) in enumerate(pbar):
        data = data.to(DEVICE)
        label = label.to(DEVICE)
        optimizer.zero_grad()
        pbar_dic = OrderedDict()
        TotalLoss = 0
        if AttackMethod is not None:
            # Craft adversarial examples, then zero any gradients the
            # attack itself produced before accumulating the training loss.
            adv_inp = AttackMethod.attack(net, data, label)
            optimizer.zero_grad()
            pred = net(adv_inp)
            loss = criterion(pred, label)
            acc = torch_accuracy(pred, label, (1,))
            advacc = acc[0].item()
            advloss = loss.item()
            TotalLoss = TotalLoss + loss * adv_coef
        # The clean-data term is always included.
        pred = net(data)
        loss = criterion(pred, label)
        TotalLoss = TotalLoss + loss
        TotalLoss.backward()
        #param = next(net.parameters())
        #grad_mean = torch.mean(param.grad)
        optimizer.step()
        acc = torch_accuracy(pred, label, (1,))
        cleanacc = acc[0].item()
        cleanloss = loss.item()
        #pbar_dic['grad'] = '{}'.format(grad_mean)
        pbar_dic['Acc'] = '{:.2f}'.format(cleanacc)
        pbar_dic['loss'] = '{:.2f}'.format(cleanloss)
        pbar_dic['AdvAcc'] = '{:.2f}'.format(advacc)
        pbar_dic['Advloss'] = '{:.2f}'.format(advloss)
        pbar.set_postfix(pbar_dic)
def eval_one_epoch(net, batch_generator, DEVICE=torch.device('cuda:0'), AttackMethod = None):
    '''
    Evaluate *net* for one pass over *batch_generator*.

    :param AttackMethod: optional attack; when given, robust accuracy is
        measured on adversarial examples as well
    :return: (clean accuracy, adversarial accuracy); the latter is 0 when
        no AttackMethod is supplied
    '''
    net.eval()
    pbar = tqdm(batch_generator)
    clean_accuracy = AvgMeter()
    adv_accuracy = AvgMeter()
    pbar.set_description('Evaluating')
    for (data, label) in pbar:
        data = data.to(DEVICE)
        label = label.to(DEVICE)
        with torch.no_grad():
            pred = net(data)
            acc = torch_accuracy(pred, label, (1,))
            clean_accuracy.update(acc[0].item())
        if AttackMethod is not None:
            # The attack runs outside no_grad: it needs input gradients.
            adv_inp = AttackMethod.attack(net, data, label)
            with torch.no_grad():
                pred = net(adv_inp)
                acc = torch_accuracy(pred, label, (1,))
                adv_accuracy.update(acc[0].item())
        pbar_dic = OrderedDict()
        pbar_dic['CleanAcc'] = '{:.2f}'.format(clean_accuracy.mean)
        pbar_dic['AdvAcc'] = '{:.2f}'.format(adv_accuracy.mean)
        pbar.set_postfix(pbar_dic)
    adv_acc = adv_accuracy.mean if AttackMethod is not None else 0
    return clean_accuracy.mean, adv_acc
|
{"/lib/attack/__init__.py": ["/lib/attack/attack_base.py"]}
|
10,912
|
a1600012888/AdversarialTraining
|
refs/heads/master
|
/experiments/CIFAR10/adversarial.train/network.py
|
import config
from base_model.cifar_resnet18 import cifar_resnet18
def create_network():
    """Instantiate the default CIFAR ResNet-18 for this experiment."""
    return cifar_resnet18()
if __name__ == '__main__':
net = create_network()
print(net)
|
{"/lib/attack/__init__.py": ["/lib/attack/attack_base.py"]}
|
10,913
|
a1600012888/AdversarialTraining
|
refs/heads/master
|
/experiments/CIFAR10/trades.train/config.py
|
from easydict import EasyDict
import sys
import os
import argparse
import numpy as np
import torch
def add_path(path):
    """Make *path* importable by appending it to sys.path exactly once."""
    already_present = path in sys.path
    if not already_present:
        print('Adding {}'.format(path))
        sys.path.append(path)
abs_current_path = os.path.realpath('./')
root_path = os.path.join('/', *abs_current_path.split(os.path.sep)[:-3])
lib_dir = os.path.join(root_path, 'lib')
add_path(lib_dir)
from training.config import TrainingConfigBase, SGDOptimizerMaker, \
PieceWiseConstantLrSchedulerMaker, IPGDAttackMethodMaker
class IPGDTradesMaker(object):
    """Factory producing a TRADES-style PGD attacker bound to a device."""

    def __init__(self, eps, sigma, nb_iters, norm, mean, std):
        self.eps = eps              # perturbation budget
        self.sigma = sigma          # single step size
        self.nb_iters = nb_iters    # number of PGD iterations
        self.norm = norm            # 1, 2 or np.inf
        self.mean = mean            # normalisation mean tensor
        self.std = std              # normalisation std tensor

    def __call__(self, DEVICE):
        # Imported lazily so merely importing this config does not require
        # the attack package to be on sys.path yet.
        from attack.pgd_trades import IPGDTrades
        return IPGDTrades(self.eps, self.sigma, self.nb_iters, self.norm,
                          DEVICE, self.mean, self.std)
class TrainingConfing(TrainingConfigBase):
    # NOTE(review): class name is a typo of "TrainingConfig"; kept as-is
    # because module-level code instantiates it by this exact name.
    lib_dir = lib_dir
    num_epochs = 110
    val_interval = 10  # run validation every 10 epochs
    alpha = 1.0  # weight of the TRADES robustness (KL) term
    create_optimizer = SGDOptimizerMaker(lr =1e-1, momentum = 0.9, weight_decay = 2e-4)
    create_lr_scheduler = PieceWiseConstantLrSchedulerMaker(milestones = [70, 90, 100], gamma = 0.1)
    create_loss_function = torch.nn.CrossEntropyLoss
    # TRADES-style 10-step PGD for training.
    create_attack_method = \
        IPGDTradesMaker(eps = 8/255.0, sigma = 2/255.0, nb_iters = 10, norm = np.inf,
                        mean = torch.tensor(np.array([0, 0, 0]).astype(np.float32)[np.newaxis, :, np.newaxis, np.newaxis]),
                        std = torch.tensor(np.array([1,1,1]).astype(np.float32)[np.newaxis, :, np.newaxis, np.newaxis]))
    # Standard 20-step PGD for evaluation.
    create_evaluation_attack_method = \
        IPGDAttackMethodMaker(eps = 8/255.0, sigma = 2/255.0, nb_iters = 20, norm = np.inf,
                              mean=torch.tensor(
                                  np.array([0, 0, 0]).astype(np.float32)[np.newaxis, :, np.newaxis, np.newaxis]),
                              std=torch.tensor(
                                  np.array([1, 1, 1]).astype(np.float32)[np.newaxis, :, np.newaxis, np.newaxis]))
# Module-level singleton consumed via `from config import config` elsewhere.
config = TrainingConfing()
# About data
# C.inp_chn = 1
# C.num_class = 10
# NOTE(review): arguments are parsed at import time, so importing this
# module consumes sys.argv.
parser = argparse.ArgumentParser()
parser.add_argument('--resume', default=None, type=str, metavar='PATH',
                    help='path to latest checkpoint (default: none)')
parser.add_argument('-b', '--batch_size', default=128, type=int,
                    metavar='N', help='mini-batch size')
parser.add_argument('-d', type=int, default=2, help='Which gpu to use')
parser.add_argument('-freq', '--attack-interval', default=2, type = int,
                    help = 'Specify how many iterations between two batch of adv images')
parser.add_argument('--auto-continue', default=False, action = 'store_true',
                    help = 'Continue from the latest checkpoint')
args = parser.parse_args()
if __name__ == '__main__':
    pass
|
{"/lib/attack/__init__.py": ["/lib/attack/attack_base.py"]}
|
10,914
|
a1600012888/AdversarialTraining
|
refs/heads/master
|
/lib/attack/pgd.py
|
'''
Reference:
[1] Towards Deep Learning Models Resistant to Adversarial Attacks
Aleksander Madry, Aleksandar Makelov, Ludwig Schmidt, Dimitris Tsipras, Adrian Vladu
arXiv:1706.06083v3
'''
import torch
import numpy as np
import os
import sys
father_dir = os.path.join('/', *os.path.realpath(__file__).split(os.path.sep)[:-2])
if not father_dir in sys.path:
sys.path.append(father_dir)
from attack.attack_base import AttackBase, clip_eta
class IPGD(AttackBase):
    """Iterated Projected Gradient Descent (PGD) attack.

    Reference: Madry et al., "Towards Deep Learning Models Resistant to
    Adversarial Attacks", arXiv:1706.06083.
    """
    # ImageNet pre-trained mean and std (kept for reference)
    # _mean = torch.tensor(np.array([0.485, 0.456, 0.406]).astype(np.float32)[np.newaxis, :, np.newaxis, np.newaxis])
    # _std = torch.tensor(np.array([0.229, 0.224, 0.225]).astype(np.float32)[np.newaxis, :, np.newaxis, np.newaxis])
    # _mean = torch.tensor(np.array([0]).astype(np.float32)[np.newaxis, :, np.newaxis, np.newaxis])
    # _std = torch.tensor(np.array([1.0]).astype(np.float32)[np.newaxis, :, np.newaxis, np.newaxis])
    def __init__(self, eps = 6 / 255.0, sigma = 3 / 255.0, nb_iter = 20,
                 norm = np.inf, DEVICE = torch.device('cpu'),
                 mean = torch.tensor(np.array([0]).astype(np.float32)[np.newaxis, :, np.newaxis, np.newaxis]),
                 std = torch.tensor(np.array([1.0]).astype(np.float32)[np.newaxis, :, np.newaxis, np.newaxis])):
        '''
        :param eps: maximum distortion of adversarial examples
        :param sigma: single step size
        :param nb_iter: number of attack iterations
        :param norm: which norm to bound the perturbations
        :param DEVICE: device the attack tensors live on
        :param mean: per-channel normalisation mean (broadcast 1xCx1x1)
        :param std: per-channel normalisation std (broadcast 1xCx1x1)
        '''
        self.eps = eps
        self.sigma = sigma
        self.nb_iter = nb_iter
        self.norm = norm
        self.criterion = torch.nn.CrossEntropyLoss().to(DEVICE)
        self.DEVICE = DEVICE
        self._mean = mean.to(DEVICE)
        self._std = std.to(DEVICE)

    def single_attack(self, net, inp, label, eta, target = None):
        '''
        Given the original image and the perturbation computed so far, computes
        a new perturbation.
        :param net:
        :param inp: original image
        :param label:
        :param eta: perturbation computed so far
        :return: a new perturbation
        '''
        adv_inp = inp + eta
        net.zero_grad()
        pred = net(adv_inp)
        if target is not None:
            # Targeted attack: ascend the logits of the target class.
            # NOTE(review): pred[:, target] assumes `target` indexes class
            # columns -- confirm intended semantics against callers.
            targets = torch.sum(pred[:, target])
            # Bug fix: the gradient was taken w.r.t. `adv_in`, an undefined
            # name -- the targeted path always raised NameError.
            grad_sign = torch.autograd.grad(targets, adv_inp, only_inputs=True, retain_graph = False)[0].sign()
        else:
            loss = self.criterion(pred, label)
            grad_sign = torch.autograd.grad(loss, adv_inp,
                                            only_inputs=True, retain_graph = False)[0].sign()
        # Step in normalised space, clamp the de-normalised image to [0, 1],
        # then project the perturbation back into the eps-ball.
        adv_inp = adv_inp + grad_sign * (self.sigma / self._std)
        tmp_adv_inp = adv_inp * self._std + self._mean
        tmp_inp = inp * self._std + self._mean
        tmp_adv_inp = torch.clamp(tmp_adv_inp, 0, 1)  ## clip into 0-1
        #tmp_adv_inp = (tmp_adv_inp - self._mean) / self._std
        tmp_eta = tmp_adv_inp - tmp_inp
        tmp_eta = clip_eta(tmp_eta, norm=self.norm, eps=self.eps, DEVICE=self.DEVICE)
        eta = tmp_eta/ self._std
        return eta

    def attack(self, net, inp, label, target = None):
        """Run the full nb_iter-step PGD attack and return adversarial images."""
        eta = torch.zeros_like(inp)
        eta = eta.to(self.DEVICE)
        net.eval()
        # NOTE(review): flips requires_grad on the caller's tensor in place;
        # confirm callers do not rely on it staying False afterwards.
        inp.requires_grad = True
        eta.requires_grad = True
        for i in range(self.nb_iter):
            eta = self.single_attack(net, inp, label, eta, target)
            #print(i)
            #print(eta.max())
        # De-normalise, clamp to the valid image range, re-normalise.
        adv_inp = inp + eta
        tmp_adv_inp = adv_inp * self._std + self._mean
        tmp_adv_inp = torch.clamp(tmp_adv_inp, 0, 1)
        adv_inp = (tmp_adv_inp - self._mean) / self._std
        return adv_inp

    def to(self, device):
        """Move all attack state (mean/std/criterion) to *device*."""
        self.DEVICE = device
        self._mean = self._mean.to(device)
        self._std = self._std.to(device)
        self.criterion = self.criterion.to(device)
def test_IPGD():
    """Placeholder: no unit test for IPGD yet."""
    pass
if __name__ == '__main__':
test_IPGD()
|
{"/lib/attack/__init__.py": ["/lib/attack/attack_base.py"]}
|
10,915
|
a1600012888/AdversarialTraining
|
refs/heads/master
|
/experiments/CIFAR10/trades.train/training_function.py
|
import os
import sys
import config
from utils.misc import torch_accuracy, AvgMeter
from collections import OrderedDict
import torch
from tqdm import tqdm
import torch.nn.functional as F
def train_one_epoch(net, batch_generator, optimizer,
criterion, DEVICE=torch.device('cuda:0'),
descrip_str='Training', AttackMethod = None, alpha = 1):
'''
:param AttackMethod: the attack method, None represents natural training
:param alpha: weight coeffcient for mig loss
:return: None #(clean_acc, adv_acc)
'''
#assert callable(AttackMethod)
net.train()
pbar = tqdm(batch_generator)
advacc = -1
advloss = -1
cleanacc = -1
cleanloss = -1
criterion_kl = torch.nn.KLDivLoss(size_average=False).to(DEVICE)
pbar.set_description(descrip_str)
for i, (data, label) in enumerate(pbar):
data = data.to(DEVICE)
label = label.to(DEVICE)
optimizer.zero_grad()
pbar_dic = OrderedDict()
adv_inp = AttackMethod.attack(net, data, label)
optimizer.zero_grad()
pred1 = net(adv_inp)
pred2 = net(data)
loss_robust = criterion_kl(F.log_softmax(pred1, dim=1), F.softmax(pred2, dim = 1))
loss_natural = criterion(pred2, label)
TotalLoss = loss_natural + alpha * loss_robust
TotalLoss.backward()
acc = torch_accuracy(pred1, label, (1,))
advacc = acc[0].item()
advloss = loss_robust.item()
acc = torch_accuracy(pred2, label, (1,))
cleanacc = acc[0].item()
cleanloss = loss_natural.item()
param = next(net.parameters())
grad_mean = torch.mean(param.grad)
optimizer.step()
pbar_dic['grad'] = '{}'.format(grad_mean)
pbar_dic['cleanAcc'] = '{:.2f}'.format(cleanacc)
pbar_dic['cleanloss'] = '{:.2f}'.format(cleanloss)
pbar_dic['AdvAcc'] = '{:.2f}'.format(advacc)
pbar_dic['Robloss'] = '{:.2f}'.format(advloss)
pbar.set_postfix(pbar_dic)
def eval_one_epoch(net, batch_generator, DEVICE=torch.device('cuda:0'), AttackMethod = None):
net.eval()
pbar = tqdm(batch_generator)
clean_accuracy = AvgMeter()
adv_accuracy = AvgMeter()
pbar.set_description('Evaluating')
for (data, label) in pbar:
data = data.to(DEVICE)
label = label.to(DEVICE)
with torch.no_grad():
pred = net(data)
acc = torch_accuracy(pred, label, (1,))
clean_accuracy.update(acc[0].item())
if AttackMethod is not None:
adv_inp = AttackMethod.attack(net, data, label)
with torch.no_grad():
pred = net(adv_inp)
acc = torch_accuracy(pred, label, (1,))
adv_accuracy.update(acc[0].item())
pbar_dic = OrderedDict()
pbar_dic['CleanAcc'] = '{:.2f}'.format(clean_accuracy.mean)
pbar_dic['AdvAcc'] = '{:.2f}'.format(adv_accuracy.mean)
pbar.set_postfix(pbar_dic)
adv_acc = adv_accuracy.mean if AttackMethod is not None else 0
return clean_accuracy.mean, adv_acc
|
{"/lib/attack/__init__.py": ["/lib/attack/attack_base.py"]}
|
10,916
|
anshuman73/facematch
|
refs/heads/master
|
/app.py
|
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_admin import Admin
import face_match
app = Flask(__name__)
# SECURITY(review): database credentials are hard-coded in source; move the
# URI (and especially the password) into configuration/environment.
app.config['SQLALCHEMY_DATABASE_URI'] = 'postgres://postgres:No1cancrackthis@localhost:5432/facematch'
app.config['UPLOAD_FOLDER'] = '/uploads'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
# SECURITY(review): hard-coded secret key makes signed sessions forgeable;
# load a real secret from the environment.
app.secret_key = 'this_is_a_secret_key'
db = SQLAlchemy(app)
admin = Admin(app, name='FaceMatch', template_mode='bootstrap3')
# Imported for their side effects (route/model/admin registration); these
# modules import `app` and `db` back from here (circular by design).
from views import *
from models import *
from admin import *
db.create_all()
if __name__ == '__main__':
    app.run()
|
{"/app.py": ["/face_match.py", "/views.py", "/models.py", "/admin.py"], "/views.py": ["/app.py", "/models.py", "/face_match.py"], "/admin.py": ["/app.py", "/models.py"], "/models.py": ["/app.py"]}
|
10,917
|
anshuman73/facematch
|
refs/heads/master
|
/views.py
|
from app import app, db
from flask import session, redirect, url_for, render_template, abort, request, flash
from forms import LoginForm
from models import Student, Teacher, Course, Class, Attendance
import os
from werkzeug.utils import secure_filename
from face_match import give_match
ALLOWED_EXTENSIONS = ['png', 'jpg', 'jpeg', 'gif']
@app.route('/index', methods=['GET'])
@app.route('/', methods=['GET'])
def index():
    """Send logged-in users to their role's dashboard, others to login."""
    logged_in = session.get('logged_in') and session.get('username')
    if not logged_in:
        return redirect(url_for('login'))
    role = session.get('role')
    if role == 'teacher':
        return redirect(url_for('teacher_dashboard'))
    if role == 'student':
        return redirect(url_for('student_dashboard'))
@app.route('/login', methods=['GET'])
def login():
    """Show the role-selection login page, unless already authenticated."""
    if not (session.get('logged_in') and session.get('username')):
        return render_template('main_login.html')
    # NOTE(review): no 'dashboard' endpoint is visible in this file --
    # confirm this redirect target exists.
    return redirect(url_for('dashboard'))
@app.route('/login/<role>', methods=['GET', 'POST'])
def login_role(role):
    """Role-specific login form for 'student' or 'teacher'.

    NOTE(review): only the username is checked against the database; the
    submitted password is never verified (see TODO below).
    """
    form = LoginForm()
    if form.validate_on_submit():
        username, password = form.username.data, form.password.data
        # TODO: Actually verify password
        if role == 'student':
            student = Student.query.filter_by(username=username).first()
            if student:
                session['logged_in'] = True
                session['username'] = username
                session['role'] = 'student'
                return redirect(url_for('student_dashboard'))
            else:
                form.username.errors.append('Unknown username')
                return render_template('student_login.html', form=form)
        elif role == 'teacher':
            teacher = Teacher.query.filter_by(username=username).first()
            if teacher:
                session['logged_in'] = True
                session['username'] = username
                session['role'] = 'teacher'
                return redirect(url_for('teacher_dashboard'))
            else:
                form.username.errors.append('Unknown username')
                return render_template('teacher_login.html', form=form)
        else:
            # Valid submission for an unknown role.
            return abort(403)
    else:
        # Initial GET (or failed validation): render the right form.
        if role == 'student':
            return render_template('student_login.html', form=form)
        elif role == 'teacher':
            return render_template('teacher_login.html', form=form)
        else:
            return abort(404)
@app.route('/logout', methods=['GET', 'POST'])
def logout():
    """Drop the entire session and return to the login screen."""
    session.clear()
    return redirect(url_for('login'))
def allowed_file(filename):
    """True when *filename* has an extension listed in ALLOWED_EXTENSIONS."""
    if '.' not in filename:
        return False
    extension = filename.rsplit('.', 1)[1].lower()
    return extension in ALLOWED_EXTENSIONS
@app.route('/teacher', methods=['GET'])
def teacher_dashboard():
    """List the courses taught by the logged-in teacher."""
    if session.get('role') != 'teacher':
        return abort(403)
    teacher = Teacher.query.filter_by(username=session.get('username')).first()
    # NOTE(review): assumes the session username always matches a Teacher
    # row; a stale session would make `teacher` None here -- confirm.
    return render_template('teacher_dashboard.html', courses=teacher.courses_taught)
@app.route('/teacher/course/<course_name>', methods=['GET', 'POST'])
def get_course(course_name):
    """Show the class sessions of one of the logged-in teacher's courses."""
    if session.get('role') != 'teacher':
        return abort(403)
    teacher = Teacher.query.filter_by(username=session.get('username')).first()
    course = Course.query.filter_by(teacher=teacher).filter_by(course_name=course_name).first()
    if not course:
        # Unknown course, or one owned by a different teacher.
        return abort(403)
    return render_template('course_timings.html', course=course, classes=course.classes)
@app.route('/teacher/course/<course_name>/<class_date>', methods=['GET', 'POST'])
def get_class(course_name, class_date):
    """POST: upload a class photo and mark recognised students present.
    GET: show the attendance sheet for one class session (teachers only).
    """
    if request.method == 'POST':
        # check if the post request has the file part
        if 'file' not in request.files:
            flash('No file part')
            return redirect(request.url)
        file = request.files['file']
        # if user does not select file, browser also
        # submit an empty part without filename
        if file.filename == '':
            flash('No selected file')
            return redirect(request.url)
        if file and allowed_file(file.filename):
            filename = secure_filename(file.filename)
            file.save(os.path.join(os.getcwd() + '/' + app.config['UPLOAD_FOLDER'], filename))
            # Run face recognition over the saved photo; returns full names.
            people_found = give_match(os.path.join(os.getcwd() + '/' + app.config['UPLOAD_FOLDER'], filename))
            username = session.get('username')
            teacher = Teacher.query.filter_by(username=username).first()
            course = Course.query.filter_by(teacher=teacher).filter_by(course_name=course_name).first()
            the_class = Class.query.filter_by(course=course).filter_by(date=class_date).first()
            # NOTE(review): the POST branch performs no role/ownership
            # check, and `the_class` may be None for a bad date -- confirm
            # upstream guarantees before relying on this.
            for people in people_found:
                people = people.strip()
                print(people)
                student = Student.query.filter_by(full_name=people).first()
                if student:
                    # One commit per recognised student; a mid-loop failure
                    # leaves attendance partially recorded.
                    attendance = Attendance(course.id, the_class.id, student.id, True)
                    db.session.add(attendance)
                    db.session.commit()
                else:
                    print('Student not detected')
            return redirect(request.url)
    else:
        if session.get('role') == 'teacher':
            username = session.get('username')
            teacher = Teacher.query.filter_by(username=username).first()
            course = Course.query.filter_by(teacher=teacher).filter_by(course_name=course_name).first()
            if course:
                the_class = Class.query.filter_by(course=course).filter_by(date=class_date).first()
                attendance = the_class.attendance
                return render_template('class.html', course=course, the_class=the_class, attendances=attendance)
            else:
                return abort(403)
        else:
            return abort(403)
|
{"/app.py": ["/face_match.py", "/views.py", "/models.py", "/admin.py"], "/views.py": ["/app.py", "/models.py", "/face_match.py"], "/admin.py": ["/app.py", "/models.py"], "/models.py": ["/app.py"]}
|
10,918
|
anshuman73/facematch
|
refs/heads/master
|
/admin.py
|
# Registers CRUD admin views for every model on the shared Flask-Admin
# instance created in app.py. Imported by app.py for this side effect.
from app import admin, db
from flask_admin.contrib.sqla import ModelView
from models import Student, Teacher, Course, Class, Attendance
admin.add_view(ModelView(Student, db.session))
admin.add_view(ModelView(Teacher, db.session))
admin.add_view(ModelView(Course, db.session))
admin.add_view(ModelView(Class, db.session))
admin.add_view(ModelView(Attendance, db.session))
|
{"/app.py": ["/face_match.py", "/views.py", "/models.py", "/admin.py"], "/views.py": ["/app.py", "/models.py", "/face_match.py"], "/admin.py": ["/app.py", "/models.py"], "/models.py": ["/app.py"]}
|
10,919
|
anshuman73/facematch
|
refs/heads/master
|
/face_match.py
|
import face_recognition
from PIL import Image
import numpy as np
import os
known_faces = []        # gallery face encodings, parallel to known_faces_names
known_faces_names = []  # person name derived from each image's filename
working_dir = os.getcwd() + '/' + 'Faces'
# Build the gallery at import time from every image in ./Faces.
for file in os.listdir(working_dir):
    # NOTE(review): the [0] assumes every gallery image contains at least
    # one detectable face; an empty result raises IndexError here.
    known_faces.append((face_recognition.face_encodings(face_recognition.load_image_file(working_dir + '/' + file))[0]))
    known_faces_names.append(file.rsplit('.', 1)[0])
def give_match(file_path):
    """Return the best-matching known person for each face in the image at *file_path*.

    Every face encoding found in the image is compared against the module-level
    ``known_faces`` gallery; the person with the highest match percentage is
    picked per face.
    """
    unknown_faces = face_recognition.face_encodings(face_recognition.load_image_file(file_path))
    people_found = []
    print(known_faces_names)
    for face in unknown_faces:
        face_distances = face_recognition.face_distance(known_faces, face)
        # Convert distances to match percentages (higher = better match).
        match_scores = [(1 - x) * 100 for x in face_distances]
        print(['{0:.2f}'.format(score) for score in match_scores])
        # BUG FIX: the scores were previously formatted to strings *before*
        # max() was taken, so the comparison was lexicographic ('9.90' > '85.00')
        # and could select the wrong person. Compare the numeric scores instead.
        max_index = match_scores.index(max(match_scores))
        max_match_person = known_faces_names[max_index]
        people_found.append(max_match_person)
    return people_found
|
{"/app.py": ["/face_match.py", "/views.py", "/models.py", "/admin.py"], "/views.py": ["/app.py", "/models.py", "/face_match.py"], "/admin.py": ["/app.py", "/models.py"], "/models.py": ["/app.py"]}
|
10,920
|
anshuman73/facematch
|
refs/heads/master
|
/models.py
|
from app import db
class User(object):
    """Column mixin shared by Teacher and Student (plain object, not a table)."""
    id = db.Column(db.Integer, primary_key=True)
    # Login identifier; unique within each concrete table that uses the mixin.
    username = db.Column(db.UnicodeText, unique=True)
    # Display name, also used by the face-match attendance lookup in views.
    full_name = db.Column(db.UnicodeText)
class Teacher(db.Model, User):
    """A teacher; id/username/full_name columns come from the User mixin."""
    # One-to-many: a teacher teaches many courses (adds 'teacher' backref on Course).
    courses_taught = db.relationship('Course', backref='teacher', lazy=True)
# Association table backing the many-to-many Student <-> Course enrollment.
courses = db.Table('courses',
    db.Column('course_id', db.Integer, db.ForeignKey('course.id'), primary_key=True),
    db.Column('student_id', db.Integer, db.ForeignKey('student.id'), primary_key=True)
)
class Student(db.Model, User):
    """A student; id/username/full_name columns come from the User mixin."""
    # Many-to-many enrollment via the 'courses' association table;
    # also adds a 'students' backref on Course.
    courses_taken = db.relationship('Course', secondary=courses, lazy='subquery',
        backref=db.backref('students', lazy=True))
    # All attendance records ever taken for this student.
    attendance = db.relationship('Attendance', backref='student', lazy=True)
class Course(db.Model):
    """A course owned by one teacher and made up of many class sessions."""
    id = db.Column(db.Integer, primary_key=True)
    course_name = db.Column(db.UnicodeText)
    # Owning teacher; reachable as 'course.teacher' via Teacher.courses_taught backref.
    course_teacher_id = db.Column(db.Integer, db.ForeignKey('teacher.id'), nullable=False)
    classes = db.relationship('Class', backref='course', lazy=True)
class Class(db.Model):
    """One session of a course on a particular date."""
    id = db.Column(db.Integer, primary_key=True)
    course_id = db.Column(db.Integer, db.ForeignKey('course.id'), nullable=False)
    start_time = db.Column(db.DateTime)
    end_time = db.Column(db.DateTime)
    date = db.Column(db.Date)
    # Attendance rows recorded during this session ('class' backref on Attendance).
    attendance = db.relationship('Attendance', backref='class', lazy=True)
class Attendance(db.Model):
    """Records whether a student attended a particular class of a course."""
    id = db.Column(db.Integer, primary_key=True)
    course_id = db.Column(db.Integer, db.ForeignKey('course.id'), nullable=False)
    class_id = db.Column(db.Integer, db.ForeignKey('class.id'), nullable=False)
    student_id = db.Column(db.Integer, db.ForeignKey('student.id'), nullable=False)
    # Column default is False, but the constructor below defaults to True
    # because rows are only created when a student is actually detected.
    attended = db.Column(db.Boolean, default=False)

    def __init__(self, course_id, class_id, student_id, attended=True):
        """Create an attendance record; positional order is (course, class, student)."""
        self.class_id = class_id
        self.course_id = course_id
        self.student_id = student_id
        self.attended = attended
|
{"/app.py": ["/face_match.py", "/views.py", "/models.py", "/admin.py"], "/views.py": ["/app.py", "/models.py", "/face_match.py"], "/admin.py": ["/app.py", "/models.py"], "/models.py": ["/app.py"]}
|
10,947
|
rigas-IC/2D_LANGEVIN_RL_CONTROL
|
refs/heads/master
|
/EVAL_ENV.py
|
from LANGEVIN2D_ENV import Langevin2D_Env
import numpy as np
import matplotlib.pyplot as plt
from scipy import signal
# Environment Parameters
env_params = {
    "dt": 0.0005,               # integration time step
    "T" : 100.0,                # total simulated time per episode
    "a" : 1.0 +1.0j,            # Re: linear growth rate, Im: angular frequency
    "b" : -5.0e2,               # saturation / nonlinear coupling coefficient
    "D" : 0.0e-4,               # noise diffusion coefficient (0 -> deterministic run)
    "x0": 0.04472135955 + 0.0j  # fixed initial condition for this evaluation
}
# Path to save the figure (None -> show only, do not save)
#fig_path = 'figures/FeedbackControl_Kp5_mag_1_D_0_gr_10_wn_10.png'
#fig_path = 'figures/NoControl_D_1em4_gr_10_wn_10.png'
fig_path = None
# Create instance of complex Stuart-Landau equation environment
environment = Langevin2D_Env()
environment.env_params = env_params
# Pre-allocate one slot per simulation step for the trajectory/action logs,
# then put the environment into its initial state.
time = np.zeros((environment.max_episode_timesteps()))
states = np.zeros((environment.max_episode_timesteps(),2))
actions = np.zeros((environment.max_episode_timesteps(),2))
state = environment.reset()
states[0,:] = state["observation"]
# Episode reward - defined as magnitude of the complex state
sum_rewards = 0.0
# Set up control time with reference to simulation time
dt = environment.env_params["dt"]
dt_action = 0.05                     # controller sampling period
T = environment.env_params["T"]
n_env_steps = int(dt_action / dt)    # simulation steps taken per control action
n_actions = int(T/dt/n_env_steps)    # number of control actions per episode
# Proportional gain - If using feedback control (both zero -> uncontrolled run)
Kp_r = 0.0
Kp_i = 0.0
max_forcing_mag = 1.0                # saturation bound for the logged control input
observation = states[0,:]
# March system for specified number of timesteps: one proportional-feedback
# action is computed per outer iteration and held for n_env_steps inner steps.
for ii in range(0,n_actions):
    # Proportional feedback on the observed Re/Im components.
    p_control = np.array([-Kp_r*observation[0] , -Kp_i*observation[1]])
    for jj in range(0,n_env_steps):
        # NOTE(review): the *logged* action is clipped, but the *unclipped*
        # p_control is what is actually passed to execute() — confirm this
        # mismatch is intentional (harmless here while both gains are zero).
        actions[jj + ii*n_env_steps,:] = np.clip(p_control, -max_forcing_mag, max_forcing_mag)
        state, terminal, reward = environment.execute(actions= p_control)
        observation = state["observation"]
        states[jj + ii*n_env_steps,:] = observation
        time[jj + ii*n_env_steps] = environment.time
        sum_rewards += reward
# Compute and output episode metrics
print('Episode cumulative reward: {} - Average reward: {}'.format(sum_rewards, sum_rewards/environment.max_episode_timesteps()))
fig = plt.figure(figsize=(16,9))
fig.tight_layout()
# Title reflects whether proportional feedback was active during the run.
if (Kp_i == 0 and Kp_r == 0):
    fig.suptitle('No Control - Episode cumulative reward: {} - Average reward: {}'.format(sum_rewards, sum_rewards/environment.max_episode_timesteps()))
else:
    fig.suptitle('Proportional Feedback Control Kp_r={}, Kp_i={} - Episode cumulative reward: {} - Average reward: {}'.format(Kp_r, Kp_i, sum_rewards, sum_rewards/environment.max_episode_timesteps()))
plt.subplots_adjust(top=0.925, bottom=0.05, right=0.95, left=0.05, hspace=0.5)
# 2D Histogram (PDF) of the state
nbins = 200
N_2D, x_edges, y_edges = np.histogram2d(states[:,0],states[:,1], np.array([nbins,2*nbins]))
# Normalise counts by sample size and bin area to obtain a probability density.
PDF_2D = N_2D / environment.max_episode_timesteps() / (x_edges[1]-x_edges[0]) / (y_edges[1]-y_edges[0])
# Plot 2D PDF as pcolormesh
X,Y = np.meshgrid(x_edges, y_edges)
ax0 = plt.subplot2grid(shape=(4,2), loc=(0,0), rowspan=2, colspan= 1)
im = ax0.pcolormesh(X, Y, PDF_2D.T, cmap= plt.get_cmap('hot_r'))
fig.colorbar(im, ax = ax0)
ax0.set_title('2D PDF of the system states')
ax0.set_xlabel('Re(x)')
ax0.set_ylabel('Im(x)')
# 1D PDF of each state component separately
N_1D_re , x_edges_1D_re = np.histogram(states[:,0],bins = nbins)
PDF_1D_re = N_1D_re / environment.max_episode_timesteps() / (x_edges_1D_re[1] - x_edges_1D_re[0])
N_1D_im , x_edges_1D_im = np.histogram(states[:,1],bins = nbins)
PDF_1D_im = N_1D_im / environment.max_episode_timesteps() / (x_edges_1D_im[1] - x_edges_1D_im[0])
# Plot 1D PDF
ax1 = plt.subplot2grid(shape=(4,2), loc=(2,0), rowspan=1, colspan= 1)
ax1.plot(x_edges_1D_re[:-1], PDF_1D_re)
ax1.set_title('1D PDF of the real component of the state')
ax1.set_xlabel('Re(x)')
ax1.set_ylabel('P(Re(x))')
ax2 = plt.subplot2grid(shape=(4,2), loc=(3,0), rowspan=1, colspan= 1)
ax2.plot(x_edges_1D_im[:-1], PDF_1D_im)
ax2.set_title('1D PDF of the imaginary component of the state')
ax2.set_xlabel('Im(x)')
ax2.set_ylabel('P(Im(x))')
# Estimate power spectral density using Welch method
n_window = int(environment.max_episode_timesteps()/10)  # 10 segments over the episode
Fs = 1/environment.env_params["dt"]                     # sampling frequency of the log
window = signal.get_window('hann', n_window)
f_re , PSD_re = signal.welch(states[:,0], fs= Fs, window= window, noverlap= 0.5*n_window, nfft= n_window)
f_im , PSD_im = signal.welch(states[:,1], fs= Fs, window= window, noverlap= 0.5*n_window, nfft= n_window)
# Plot PSD
ax3 = plt.subplot2grid(shape=(4,2), loc=(0,1), rowspan=2, colspan= 1)
ax3.loglog(f_re, PSD_re)
ax3.loglog(f_im, PSD_im)
ax3.set_title('Power Spectral Density of the state')
ax3.set_xlabel('f [Hz]')
ax3.set_ylabel('S(x)')
ax3.legend(('Real', 'Imaginary'))
# Plot trajectory of system
ax4 = plt.subplot2grid(shape=(4,2), loc=(2,1), rowspan=1, colspan= 1)
ax4.plot(time, states[:,0])
ax4.plot(time, states[:,1])
ax4.set_title('Trajectory of the state')
ax4.set_xlabel('t')
ax4.set_ylabel('x')
ax4.legend(('Real', 'Imaginary'))
# Plot control input to the system
ax5 = plt.subplot2grid(shape=(4,2), loc=(3,1), rowspan=1, colspan= 1)
ax5.plot(time, actions[:,0])
ax5.plot(time, actions[:,1])
ax5.set_title('Control input to the system - Controller time rate: {}'.format(dt_action))
ax5.set_xlabel('t')
ax5.set_ylabel('u')
ax5.legend(('Real', 'Imaginary'))
# Save to disk only when a target path was configured; always display.
if fig_path is not None:
    fig.savefig(fig_path)
plt.show()
|
{"/EVAL_ENV.py": ["/LANGEVIN2D_ENV.py"], "/TRAIN_AGENT.py": ["/LANGEVIN2D_ENV.py"]}
|
10,948
|
rigas-IC/2D_LANGEVIN_RL_CONTROL
|
refs/heads/master
|
/TRAIN_AGENT.py
|
import os
import sys
import numpy as np
import json
from tensorforce.agents import Agent
from tensorforce.environments import Environment
from tensorforce.execution import Runner
from LANGEVIN2D_ENV import Langevin2D_Env
###############################################################################
# PARAMETERS
###############################################################################
# Parallel
num_env = 8  # parallel environment copies; also reused as the PPO batch size below
# Saver directory (name encodes the hyper-parameter configuration)
directory = os.path.join(os.getcwd(), 'agents' ,'saver_data_D_0_dta_0p05_maxa_1_ep100_lstm2_32_dense_64_gr_1_wn_1_r_ma1em2')
# Environment Parameters
env_params = {
    "dt": 0.0005,      # integration time step
    "T" : 100.0,       # episode length in simulated time
    "a" : 1.0 + 1.0j,  # Re: growth rate, Im: angular frequency
    "b" : -5.0e2,      # saturation / nonlinear coupling coefficient
    "D" : 0.0e-4,      # zero noise for this training configuration
    "x0": None         # None -> environment samples a start on the limit cycle
}
# Controller Parameters (forcing bounds exposed through the action space)
optimization_params = {
    "min_value_forcing": -1.0,
    "max_value_forcing": 1.0
}
# Training Parameters
training_params = {
    "num_episodes" : 400,
    "dt_action" : 0.05  # controller sampling period
}
# Compute environment and action input timesteps
n_env_steps = int(training_params["dt_action"] / env_params["dt"])
max_episode_timesteps = int(env_params["T"]/env_params["dt"]/n_env_steps)
# Create and instance of the complex Stuart-Landau environment
environment = Langevin2D_Env(n_env_steps = n_env_steps)
environment.env_params = env_params
environment.optimization_params = optimization_params
# One independent environment instance per parallel worker.
environments = []
for env in range(num_env):
    environments.append(Langevin2D_Env(n_env_steps = n_env_steps))
    environments[env].env_params = env_params
    environments[env].optimization_params = optimization_params
###############################################################################
# ACTOR/CRITIC NETWORK DEFINITIONS
###############################################################################
# Specify network architecture
# DENSE LAYERS
# actor_network = [
# dict(type='retrieve', tensors='observation'),
# dict(type='dense', size=2),
# ]
# LSTM
actor_network = [
    # Branch 1: recurrent encoding of the state observation.
    [
        dict(type='retrieve', tensors='observation'),
        dict(type='internal_lstm', size=32, length=2, bias=False),
        dict(type='register' , tensor ='intermed-1')
    ],
    # Branch 2: recurrent encoding of the previous action.
    [
        dict(type='retrieve', tensors='prev_action'),
        dict(type='internal_lstm', size=32, length=2, bias=False),
        dict(type='register' , tensor ='intermed-2')
    ],
    # Merge both branches and feed a dense head.
    [
        dict(type='retrieve', tensors=['intermed-1','intermed-2'], aggregation='concat'),
        dict(type='dense', size=64),
    ]
]
# The critic reuses the same architecture specification as the actor.
critic_network = actor_network
###############################################################################
# AGENT DEFINITION
###############################################################################
# Specify the agent parameters - PPO algorithm
agent = Agent.create(
    # Agent + Environment
    agent='ppo',              # Agent specification
    environment=environment,  # Environment object (provides state/action specs)
    exploration=0.0,          # presumably disables extra action noise — confirm against tensorforce docs
    # Network
    network=actor_network,    # Policy NN specification
    # Optimization
    batch_size=num_env,       # Number of episodes per update batch (one per worker)
    learning_rate=1e-4,       # Optimizer learning rate
    subsampling_fraction=0.33,  # Fraction of batch timesteps to subsample
    optimization_steps=25,
    # Reward estimation
    likelihood_ratio_clipping=0.2,  # The epsilon of the ppo CLI objective
    estimate_terminal=False,  # Whether to estimate the value of terminal states
    # TODO: gae_lambda=0.97 doesn't currently exist - ???
    # Critic
    critic_network=critic_network,  # Critic NN specification
    critic_optimizer=dict(
        type='multi_step', num_steps=5,
        optimizer=dict(type='adam', learning_rate=1e-4)
    ),
    # Regularization
    entropy_regularization=0.01,  # To discourage policy from being too 'certain'
    # Parallel
    parallel_interactions=num_env,
    # TensorFlow
    saver=dict(directory=directory, filename="agent"),  # TensorFlow saver configuration for periodic implicit saving
    # TensorBoard Summarizer
    #summarizer=dict(directory=os.path.join(directory, 'summarizer') , labels="all")
)
###############################################################################
# TRAINING
###############################################################################
# Runner definition - drives the parallel environments against the agent
runner = Runner(
    environments=environments,
    agent=agent,
    remote="multiprocessing",  # one subprocess per environment instance
    max_episode_timesteps=max_episode_timesteps,
    #evaluation=True
)
# Proceed to training
runner.run(
    num_episodes=training_params["num_episodes"],
    sync_episodes=True,  # keep parallel episodes in lockstep for batched updates
    #save_best_agent=os.path.join(os.getcwd(), 'best_agent')
)
# Persist the final agent explicitly (in addition to the periodic saver snapshots).
agent.save()
runner.close()
|
{"/EVAL_ENV.py": ["/LANGEVIN2D_ENV.py"], "/TRAIN_AGENT.py": ["/LANGEVIN2D_ENV.py"]}
|
10,949
|
rigas-IC/2D_LANGEVIN_RL_CONTROL
|
refs/heads/master
|
/LANGEVIN2D_ENV.py
|
import numpy as np
from tensorforce.environments import Environment
class Langevin2D_Env(Environment):
    '''
    Tensorforce environment wrapping the stochastic 2D (complex) Stuart-Landau
    equation, integrated with the Euler-Maruyama scheme.

    env_params:
    dt (float) - time step for dynamics evolution - default: 0.0005
    T (float) - total simulation time - default: 100.0
    a (complex float) - Re(a): growth rate of the dynamics - Im(a): angular frequency at equilibrium
    b (complex float) - Re(b): saturation term - Im(b): strength of the non-linear coupling between amplitude and frequency
    D (float) - Diffusion coefficient associated with Gaussian White Noise forcing
    x0 (complex float) - Initial position of the system; None samples a random point on the limit cycle
    '''
    # Class-level defaults; driver scripts typically overwrite these dicts
    # right after construction (see EVAL_ENV.py / TRAIN_AGENT.py).
    env_params = {
        "dt": 0.0005,
        "T" : 100.0,
        "a" : 1.0 +1.0j,
        "b" : -5.0e2,
        "D" : 1.0e-1,
        "x0": 0.03 + 0.0j
    }
    # Bounds of the complex control forcing, exposed via actions().
    optimization_params = {
        "min_value_forcing": -1.0,
        "max_value_forcing": 1.0
    }

    def __init__(self, n_env_steps = 1):
        super().__init__()
        self.state = 0.0 + 0.0j  # Internal (complex) state of the system
        self.time = 0.0          # Internal time of the system
        self.n = 0               # Step number
        self.N = int(self.env_params["T"] / self.env_params["dt"])  # Maximum number of steps to take
        self.n_env_steps = n_env_steps  # Number of environment steps to march the system between actions
        print(self.N)

    def states(self):
        '''
        Returns the state space specification.
        :return: dictionary of state descriptions with the following attributes:
        type ("bool" / "int" / "float") – state data type (required)
        shape (int > 0 / iter[int > 0]) – state shape (default: scalar)
        '''
        # Observation is (Re, Im) of the state; the previous action is fed
        # back as part of the state (consumed by the LSTM branch in training).
        return dict(observation = dict(type='float', shape=(2,)), prev_action = dict(type='float', shape=(2,)))

    def actions(self):
        '''
        Returns the action space specification.
        :return: dictionary of action descriptions with the following attributes:
        type ("bool" / "int" / "float") – action data type (required)
        shape (int > 0 / iter[int > 0]) – action shape
        min_value/max_value (float) – minimum/maximum action value
        '''
        return dict(type='float', shape=(2,),
            min_value=self.optimization_params["min_value_forcing"],
            max_value=self.optimization_params["max_value_forcing"])

    def reset(self):
        """
        Reset environment and setup for new episode.
        Returns:
        initial state of reset environment.
        """
        # Reset simulation time
        self.time = 0.0
        self.n = 0
        # Reset environment to initial position
        if self.env_params["x0"] is not None:
            self.state = self.env_params["x0"]
        else:
            # x0 is None: start at the limit-cycle radius sqrt(-Re(a)/Re(b))
            # with a randomly drawn phase.
            eq = np.sqrt(-np.real(self.env_params["a"])/np.real(self.env_params["b"]))
            self.state = eq*np.exp(np.random.normal(scale= 0.5*np.pi)*1j)
            print(self.state)
        self.N = int(self.env_params["T"] / self.env_params["dt"]) # Maximum number of steps to take
        next_state = dict(
            observation = np.array([np.real(self.state),np.imag(self.state)]).flatten(),
            prev_action = np.zeros((2,))
        )
        return(next_state)

    def execute(self, actions = np.array([0.0,0.0])):
        '''
        Run solver for one action step, until next RL env state (this means to run for number_steps_execution)
        :param: actions
        :return: next state (state value at end of action step)
        terminal
        reward (magnitude of the state)
        '''
        # NOTE(review): mutable default argument; harmless here because the
        # array is never mutated, but worth replacing with None at some point.
        action = actions[0] + actions[1]*1j
        # Parameters of the system
        a = self.env_params["a"]
        b = self.env_params["b"]
        D = self.env_params["D"]
        # Solver parameters
        dt = self.env_params["dt"]
        # Gaussian White Noise forcing
        sigma = np.sqrt(2*D) # STD of stochastic forcing
        # March the system using Euler-Maruyama method for discretization
        # The system will evolve by n_env_steps steps between control input
        cum_reward = 0.0
        for _ in range(self.n_env_steps):
            An = np.random.normal(0.0,sigma) + np.random.normal(0.0,sigma)*1j
            # Deterministic component of system: complex Stuart-Landau equation
            SL_deterministic = a * self.state + b * self.state * np.square(np.abs(self.state))
            self.time = self.time + dt
            # Euler-Maruyama update: deterministic drift + control + sqrt(dt)-scaled noise
            self.state = self.state + SL_deterministic * dt + action * dt + An * np.sqrt(dt)
            self.n += 1
            cum_reward -= np.abs(self.state)
        # Extract Real and Imaginary part of state as two separate states
        # Ensure reshape to size (2,)
        next_state = dict(
            observation = np.array([np.real(self.state),np.imag(self.state)]).reshape(2,),
            prev_action = actions
        )
        terminal = False
        # Reward based on magnitude of the state
        #reward = -np.abs(self.state)
        # Reward based on average magnitude of the state
        #reward =cum_reward / self.n_env_steps
        # Reward based on average magnitude of the state and action input penalization
        reward = cum_reward / self.n_env_steps - 1e-2*(np.abs(action) / self.optimization_params["max_value_forcing"])
        # Print completion status to console (every 5% of the episode)
        if (self.n % (self.N/20) == 0):
            print(self.n)
        return (next_state, terminal, reward)

    def max_episode_timesteps(self):
        # Episode length in *simulation* steps (not in control actions).
        N = int(self.env_params["T"] / self.env_params["dt"])
        return N
if __name__ == "__main__":
    # Smoke test: build an environment, reset it, then take one step with
    # the default (zero) forcing and show what comes back.
    env = Langevin2D_Env()
    state = env.reset()
    print(state)
    state, terminal, reward = env.execute()
    print(state, terminal, reward)
|
{"/EVAL_ENV.py": ["/LANGEVIN2D_ENV.py"], "/TRAIN_AGENT.py": ["/LANGEVIN2D_ENV.py"]}
|
10,951
|
F4R4N/film-review
|
refs/heads/main
|
/contactus/urls.py
|
from django.urls import path
from .views import CreateForm, AdminContactReader, AdminSendMassEmail
# URL namespace for reversing ('contact:...').
app_name = "contact"
urlpatterns = [
    # Contact-form submission endpoint.
    path("contact/", CreateForm.as_view()),
    # Message listing, optionally filtered by year/month (same view handles both).
    path("admin/contact/", AdminContactReader.as_view()),
    path("admin/contact/<int:year>/<int:month>/", AdminContactReader.as_view()),
    # Mass e-mail sending endpoint.
    path("admin/mass_mail/", AdminSendMassEmail.as_view()),
]
|
{"/contactus/urls.py": ["/contactus/views.py"], "/blog/views.py": ["/blog/models.py", "/api/models.py", "/blog/serializers.py", "/blog/utils.py"], "/api/views_utils.py": ["/api/models.py"], "/customauth/models.py": ["/api/models.py", "/api/utils.py"], "/blog/serializers.py": ["/blog/models.py"], "/contactus/serializers.py": ["/contactus/models.py"], "/contactus/views.py": ["/contactus/models.py", "/contactus/utils.py", "/contactus/serializers.py"], "/api/serializers.py": ["/api/models.py", "/customauth/models.py"], "/customauth/serializers.py": ["/customauth/models.py"], "/customauth/urls.py": ["/customauth/views.py"], "/api/views.py": ["/api/serializers.py", "/api/models.py", "/api/views_utils.py", "/api/utils.py", "/customauth/models.py", "/config/settings.py"], "/blog/admin.py": ["/blog/models.py"], "/api/urls.py": ["/api/views.py"], "/contactus/models.py": ["/contactus/utils.py"], "/api/models.py": ["/api/utils.py"], "/api/admin.py": ["/api/models.py"], "/blog/models.py": ["/api/utils.py", "/blog/utils.py"], "/customauth/views.py": ["/customauth/models.py", "/customauth/serializers.py"], "/blog/urls.py": ["/blog/views.py"], "/customauth/admin.py": ["/customauth/models.py"]}
|
10,952
|
F4R4N/film-review
|
refs/heads/main
|
/config/urls.py
|
from django.contrib import admin
from django.urls import path, include
from . import settings
from django.conf.urls.static import static
from rest_framework import permissions
from drf_yasg import openapi
from drf_yasg.views import get_schema_view
# drf-yasg schema view backing the Swagger UI at /doc/ (publicly accessible).
schema_view = get_schema_view(
    openapi.Info(
        title="Film Review Api Documentation",
        default_version='v1',
        description="all you need to know about the film review api is in the following documentation please dont bother.",
        contact=openapi.Contact(email="farantgh@gmail.com"),
        license=openapi.License(name="BSD License"),
    ),
    public=True,
    permission_classes=(permissions.AllowAny,),
)
urlpatterns = [
    path('admin/', admin.site.urls),
    path('v1/auth/', include('customauth.urls')),
    path('v1/api/', include('api.urls')),
    path('v1/blog/', include('blog.urls')),
    path('v1/', include('contactus.urls')),
    path('doc/', schema_view.with_ui('swagger', cache_timeout=0), name='schema_swagger_ui')
]
# Serve uploaded media through Django itself in development only.
if settings.DEBUG:
    urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
{"/contactus/urls.py": ["/contactus/views.py"], "/blog/views.py": ["/blog/models.py", "/api/models.py", "/blog/serializers.py", "/blog/utils.py"], "/api/views_utils.py": ["/api/models.py"], "/customauth/models.py": ["/api/models.py", "/api/utils.py"], "/blog/serializers.py": ["/blog/models.py"], "/contactus/serializers.py": ["/contactus/models.py"], "/contactus/views.py": ["/contactus/models.py", "/contactus/utils.py", "/contactus/serializers.py"], "/api/serializers.py": ["/api/models.py", "/customauth/models.py"], "/customauth/serializers.py": ["/customauth/models.py"], "/customauth/urls.py": ["/customauth/views.py"], "/api/views.py": ["/api/serializers.py", "/api/models.py", "/api/views_utils.py", "/api/utils.py", "/customauth/models.py", "/config/settings.py"], "/blog/admin.py": ["/blog/models.py"], "/api/urls.py": ["/api/views.py"], "/contactus/models.py": ["/contactus/utils.py"], "/api/models.py": ["/api/utils.py"], "/api/admin.py": ["/api/models.py"], "/blog/models.py": ["/api/utils.py", "/blog/utils.py"], "/customauth/views.py": ["/customauth/models.py", "/customauth/serializers.py"], "/blog/urls.py": ["/blog/views.py"], "/customauth/admin.py": ["/customauth/models.py"]}
|
10,953
|
F4R4N/film-review
|
refs/heads/main
|
/blog/views.py
|
from rest_framework.response import Response
from rest_framework import status, permissions, generics
from rest_framework.views import APIView
from django.shortcuts import get_object_or_404
from .models import Post, Tag, Comment
from api.models import Group
from .serializers import PostSerializer, DemoPostSerializer, CommentSerializer
from .utils import CustomPaginator
class CreateAndGetUserPost(APIView):
    """
    on GET return all post of the authenticated user.
    on POST create new post with fields:
        required=(title, body, visibility)
        optional=(image, tags) 'tags' is an array of tags name if not exist create new one
    """
    permission_classes = (permissions.IsAuthenticated, )

    def post(self, request, format=None):
        """
        Attributes
        ----------
        user -> User(object) : authenticated user which sending the request
        required_fields -> list : list of required fields should be in request.data
        valid_visibilities -> list : list of visibilities that user can use in request.data
        post -> Post(object) : include the post object that create with user provided data
        tag_obj -> Tag(object) : contain user desired tag object
        Responses
        ----------
        400 -> key="detail", value="field '{0}' is required." {0} can be : ["title", "body", "visibility"]
        400 -> [key="detail", value="value '{0}' is not valid." {0} is request.data["visibility"] provided by user], [key="valid values", value=["draft", "group", "all"]]
        201 -> key="detail", value="post created."
        Input Types
        ----------
        Required
            request.data["title"] -> String
            request.data["body"] -> String
            request.data["visibility"] -> String (of choices ["draft", "group", "all"])
        Optional
            request.data["image"] -> Image
            request.data["tags"] -> Array (of tags name)
        """
        user = request.user
        required_fields = ["title", "body", "visibility"]
        # check if all required fields are included in 'request.data'
        for field in required_fields:
            if field not in request.data:
                return Response(
                    status=status.HTTP_400_BAD_REQUEST,
                    data={"detail": "field '{0}' is required.".format(field)})
        valid_visibilities = ["draft", "group", "all"]
        # check if user provided 'visibility' type is in valid types or not
        # (idiom fix: 'not in' instead of 'not x in ...')
        if request.data["visibility"] not in valid_visibilities:
            return Response(
                status=status.HTTP_400_BAD_REQUEST,
                data={"detail": "value '{0}' is not valid.".format(
                    request.data["visibility"]), "valid values": valid_visibilities})
        # create a post object in database with the given data
        post = Post.objects.create(
            title=request.data["title"],
            body=request.data["body"],
            visibility=request.data["visibility"],
            author=user,
        )
        # 'image' and 'tags' are two optional fields. check if provided then add them to the 'post' object
        if "image" in request.data:
            post.image = request.data["image"]
        if "tags" in request.data:
            # request.data["tags"] is a json array. iterate in the list and if exist add them to the object.
            for tag in request.data["tags"]:
                # get_or_create replaces the get/except-DoesNotExist/create
                # dance: one idiomatic call, and it narrows the race window
                # between concurrent requests creating the same tag.
                tag_obj, _ = Tag.objects.get_or_create(name=tag)
                # add tag to ManyToMany field with name 'tags'
                post.tags.add(tag_obj)
        # save 'image' and 'tags' if provided
        post.save()
        return Response(
            status=status.HTTP_201_CREATED, data={"detail": "post created."})

    def get(self, request, format=None):
        """
        Attributes
        ----------
        user -> django.contrib.auth.models.User(object) : authenticated user which sending the request
        posts -> django.db.models.query.QuerySet(object) : queryset of all Post objects that author is the authenticated user
        serializer -> blog.serializers.PostSerializer(object) : contain serialized data of 'posts' queryset
        Responses
        ----------
        200 -> return serialized data in serializer variable, return all posts that author of it is user that sending the request
        """
        user = request.user
        # get all posts that author is 'user'
        posts = Post.objects.filter(author=user)
        # serialize the object with 'many=True' allow us to serialize a queryset of post
        serializer = PostSerializer(instance=posts, many=True)
        return Response(status=status.HTTP_200_OK, data=serializer.data)
class EditAndDeletePost(APIView):
    """
    should pass post key in url.
    mothods=[
        PUT=('title', 'body', 'visibility', 'image', 'tags') visibility should be choice of ('draft', 'group', 'all')
        DELETE=just pass the post key in url path
    ]
    """
    permission_classes = (permissions.IsAuthenticated, )

    def put(self, request, post_key, format=None):
        """
        Attributes
        ----------
        user -> django.contrib.auth.models.User(object) : authenticated user which sending the request
        post -> blog.models.Post(object) : contain post object that 'key'= provided key in the url if not exist return 404
        Responses
        ----------
        404 -> key="detail", value="Not found." : if the given 'key' in the url is not refer to a Post object
        403 -> key="detail", value="you dont have permission to perform this action." : if user sending the request not post author
        400 -> key="detail", value="no new data provided." : if not key exist in request.data
        400 -> [key="detail", value="value '{0}' is not valid." {0} is request.data["visibility"] provided by user], [key="valid values", value=["draft", "group", "all"]]
        200 -> key="detail", value="updated"
        Input Types
        ----------
        Required
            no required field. but at least one field should be in request.
            post_key -> String : in the url
        Optional
            request.data["title"] -> String
            request.data["body"] -> String
            request.data["visibility"] -> String (of choices ["draft", "group", "all"])
            request.data["image"] -> Image
            request.data["tags"] -> Array (of tags name)
        """
        user = request.user
        # get the post that editing should perform on it
        post = get_object_or_404(Post, key=post_key)
        # check if user own the post or not if not return unathorized
        if post.author != user:
            return Response(
                status=status.HTTP_403_FORBIDDEN,
                data={"detail": "you dont have permission to perform this action."})
        # check if request.data is not empty
        if len(request.data) == 0:
            return Response(
                status=status.HTTP_400_BAD_REQUEST,
                data={"detail": "no new data provided."})
        # all the fields sending are optional so we check for every of them are included or not
        if "title" in request.data:
            post.title = request.data["title"]
        if "body" in request.data:
            post.body = request.data["body"]
        if "visibility" in request.data:
            valid_visibilities = ["draft", "group", "all"]
            # check if given visibility is valid and in choices
            # (idiom fix: 'not in' instead of 'not x in ...')
            if request.data["visibility"] not in valid_visibilities:
                return Response(
                    status=status.HTTP_400_BAD_REQUEST,
                    data={"detail": "value '{0}' is not valid.".format(
                        request.data["visibility"]),
                        "valid values": valid_visibilities})
            post.visibility = request.data["visibility"]
        if "image" in request.data:
            post.image = request.data["image"]
        # request.data["tags"] is a json array. iterate in the list and if exist add them to the object.
        if "tags" in request.data:
            for tag in request.data["tags"]:
                # get_or_create replaces the get/except-DoesNotExist/create
                # dance (same idiom as CreateAndGetUserPost.post).
                tag_obj, _ = Tag.objects.get_or_create(name=tag)
                post.tags.add(tag_obj)
        # save fields that modified
        post.save()
        return Response(status=status.HTTP_200_OK, data={"detail": "updated"})

    def delete(self, request, post_key, format=None):
        """
        Attributes
        ----------
        user -> django.contrib.auth.models.User(object) : authenticated user which sending the request
        post -> blog.models.Post(object) : contain post object that 'key'= provided key in the url if not exist return 404
        Responses
        ----------
        404 -> key="detail", value="Not found." : if the given 'key' in the url is not refer to a Post object
        403 -> key="detail", value="you dont have permission to perform this action." : if user sending the request not post author
        200 -> key="detail", value="deleted"
        Input Types
        ----------
        post_key -> String : in the url
        """
        user = request.user
        post = get_object_or_404(Post, key=post_key)
        if post.author != user:
            return Response(
                status=status.HTTP_403_FORBIDDEN,
                data={"detail": "you dont have permission to perform this action."})
        post.delete()
        return Response(status=status.HTTP_200_OK, data={"detail": "deleted"})
class AllPublicPostsPaginated(generics.ListAPIView):
    """
    return all posts with 'visibility="all"'
    to prevent load all data at once add paginatior .
    with scroll should get next page for that pass parameter 'page' to url like
    '.../?page=2'.
    """
    permission_classes = (permissions.IsAuthenticated, )
    # Only world-visible posts are listed here.
    queryset = Post.objects.filter(visibility="all")
    # Lightweight serializer used for list previews.
    serializer_class = DemoPostSerializer
    # paginator class is defined in .utils.py . 'page_size' = 5 and page parameter to access different pages is 'page'
    pagination_class = CustomPaginator
class DesiredPost(APIView):
    """
    return all data of the desired post. post key should pass in url.
    """
    permission_classes = (permissions.IsAuthenticated, )

    def get(self, request, post_key, format=None):
        """
        Attributes
        ----------
        user -> django.contrib.auth.models.User(object) : authenticated user which sending the request
        post -> blog.models.Post(object) : contain post object that 'post_key' provided key in the url if not exist return 404
        serializer -> blog.serializers.PostSerializer(object) : contain serialized data of 'post' object
        Responses
        ----------
        404 -> key="detail", value="Not found." : if the post with the given post_key not found
        403 -> key="detail", value="this post is restricted to group and you are not part of the group." : if user is not member of any groups that author of post is a member of
        200 -> return serialized data of the post you passed 'post_key' in url
        Input Types
        ----------
        post_key -> String : in the url
        """
        user = request.user
        post = get_object_or_404(Post, key=post_key)
        # check if the desired post is restricted to the group members
        if post.visibility == "group":
            # all groups that the author of the post is a member of
            author_groups = post.author.profile.group.all()
            # the requester may view the post iff they share at least one
            # group with the author; any() over a generator short-circuits
            # on the first shared group instead of building a boolean list
            # and querying for every group.
            is_member = any(
                user.profile.group.filter(key=igroup.key).exists()
                for igroup in author_groups)
            if not is_member:
                return Response(
                    status=status.HTTP_403_FORBIDDEN,
                    data={"detail": "this post is restricted to group and you are not part of the group."})
        # else return the proper serialized json object
        serializer = PostSerializer(instance=post)
        # and also add 1 to visits
        # NOTE(review): read-modify-write is racy under concurrent requests;
        # an F('visits') + 1 expression would make it atomic — confirm before changing.
        post.visits += 1
        post.save()
        return Response(status=status.HTTP_200_OK, data=serializer.data)
class GroupPublicPostsPaginated(APIView):
    """
    Return every post with visibility="group" whose author belongs to the
    group identified by the group_key URL parameter.
    """
    permission_classes = (permissions.IsAuthenticated, )

    def get(self, request, group_key, format=None):
        """
        Responses
        ----------
        400 -> detail="you are not member of this group." : requester is outside the group
        404 -> detail="Not found." : no group with the given group_key
        200 -> serialized list of the group's restricted posts

        Input Types
        ----------
        group_key -> String : in the url
        """
        requester = request.user
        # Membership gate: only members may list the group's restricted posts.
        is_member = requester.profile.group.filter(key=group_key).exists()
        if not is_member:
            return Response(
                status=status.HTTP_400_BAD_REQUEST,
                data={"detail": "you are not member of this group."})
        group = get_object_or_404(Group, key=group_key)
        # Posts whose author is in this group AND that are group-restricted.
        group_posts = Post.objects.filter(
            author__profile__group=group, visibility="group")
        serializer = DemoPostSerializer(instance=group_posts, many=True)
        return Response(status=status.HTTP_200_OK, data=serializer.data)
class CreateComment(APIView):
    """
    Create a comment on a post. Authenticated users only; field 'body' is
    required and the post key comes from the URL.
    """
    permission_classes = (permissions.IsAuthenticated, )

    def post(self, request, post_key, format=None):
        """
        Responses
        ----------
        400 -> detail="field 'body' is required." : 'body' missing from request.data
        404 -> detail="Not found." : no post with the given post_key
        201 -> detail="comment created." plus the serialized comment under "data"

        Input Types
        ----------
        request.data["body"] -> String
        post_key -> String : in the url
        """
        # Validate input before touching the database.
        if "body" not in request.data:
            return Response(
                status=status.HTTP_400_BAD_REQUEST,
                data={"detail": "field 'body' is required."})
        parent_post = get_object_or_404(Post, key=post_key)
        new_comment = Comment.objects.create(
            post=parent_post,
            author=request.user,
            body=request.data["body"],
        )
        serializer = CommentSerializer(instance=new_comment)
        # Echo the created comment back so the client need not re-fetch.
        return Response(
            status=status.HTTP_201_CREATED,
            data={"detail": "comment created.", "data": serializer.data})
class EditAndDeleteComment(APIView):
    """
    PUT    -> edit a comment's body; only the comment's author may do this.
    DELETE -> remove a comment; the comment's author or the post's owner may
              do this.
    The comment key is taken from the URL in both cases.
    """
    permission_classes = (permissions.IsAuthenticated, )

    def put(self, request, comment_key, format=None):
        """
        Responses
        ----------
        404 -> detail="Not found." : unknown comment_key
        403 -> detail="you dont have permission to perform this action." : requester is not the comment author
        400 -> detail="no new data provided." : empty request body
        200 -> detail="comment modified."

        Input Types
        ----------
        comment_key -> String : at the end of the url
        request.data["body"] -> String
        """
        comment = get_object_or_404(Comment, key=comment_key)
        # Editing is restricted to the comment's own author.
        if comment.author != request.user:
            return Response(
                status=status.HTTP_403_FORBIDDEN,
                data={"detail": "you dont have permission to perform this action."})
        if not request.data:
            return Response(
                status=status.HTTP_400_BAD_REQUEST,
                data={"detail": "no new data provided."})
        if "body" in request.data:
            comment.body = request.data["body"]
        comment.save()
        return Response(
            status=status.HTTP_200_OK, data={"detail": "comment modified."})

    def delete(self, request, comment_key, format=None):
        """
        Responses
        ----------
        404 -> detail="Not found." : unknown comment_key
        403 -> detail="you dont have permission to perform this action." : requester is neither comment author nor post owner
        200 -> detail="deleted"

        Input Types
        ----------
        comment_key -> String : at the end of the url
        """
        comment = get_object_or_404(Comment, key=comment_key)
        # Both the comment's author and the post's owner may delete.
        may_delete = request.user in (comment.author, comment.post.author)
        if not may_delete:
            return Response(
                status=status.HTTP_403_FORBIDDEN,
                data={"detail": "you dont have permission to perform this action."})
        comment.delete()
        return Response(status=status.HTTP_200_OK, data={"detail": "deleted"})
|
{"/contactus/urls.py": ["/contactus/views.py"], "/blog/views.py": ["/blog/models.py", "/api/models.py", "/blog/serializers.py", "/blog/utils.py"], "/api/views_utils.py": ["/api/models.py"], "/customauth/models.py": ["/api/models.py", "/api/utils.py"], "/blog/serializers.py": ["/blog/models.py"], "/contactus/serializers.py": ["/contactus/models.py"], "/contactus/views.py": ["/contactus/models.py", "/contactus/utils.py", "/contactus/serializers.py"], "/api/serializers.py": ["/api/models.py", "/customauth/models.py"], "/customauth/serializers.py": ["/customauth/models.py"], "/customauth/urls.py": ["/customauth/views.py"], "/api/views.py": ["/api/serializers.py", "/api/models.py", "/api/views_utils.py", "/api/utils.py", "/customauth/models.py", "/config/settings.py"], "/blog/admin.py": ["/blog/models.py"], "/api/urls.py": ["/api/views.py"], "/contactus/models.py": ["/contactus/utils.py"], "/api/models.py": ["/api/utils.py"], "/api/admin.py": ["/api/models.py"], "/blog/models.py": ["/api/utils.py", "/blog/utils.py"], "/customauth/views.py": ["/customauth/models.py", "/customauth/serializers.py"], "/blog/urls.py": ["/blog/views.py"], "/customauth/admin.py": ["/customauth/models.py"]}
|
10,954
|
F4R4N/film-review
|
refs/heads/main
|
/api/views_utils.py
|
from django.shortcuts import get_object_or_404
from .models import Group
from itertools import chain
def all_movies_in_group(key):
    """Return the keys of every unwatched movie owned by members of the group.

    Raises Http404 (via get_object_or_404) when no group matches *key*.
    """
    group = get_object_or_404(Group, key=key)
    # One key-list per member profile, flattened into a single list.
    per_member = (
        profile.user.movie.filter(watched=False).values_list("key", flat=True)
        for profile in group.profile_set.all()
    )
    return list(chain.from_iterable(per_member))
def have_permission_for_group(group_key, user):
    """Return True when *user*'s profile belongs to the group with *group_key*.

    The queryset's .exists() already yields the boolean the caller needs,
    so the original if/return-False/return-True branch is unnecessary.
    """
    return user.profile.group.filter(key=group_key).exists()
def is_admin_user(group_key, user):
    """Return True when *user* is the admin of the group with *group_key*.

    Raises Http404 (via get_object_or_404) when no group matches.
    The comparison itself is the boolean result -- no branching needed.
    """
    group = get_object_or_404(Group, key=group_key)
    return group.admin == user
|
{"/contactus/urls.py": ["/contactus/views.py"], "/blog/views.py": ["/blog/models.py", "/api/models.py", "/blog/serializers.py", "/blog/utils.py"], "/api/views_utils.py": ["/api/models.py"], "/customauth/models.py": ["/api/models.py", "/api/utils.py"], "/blog/serializers.py": ["/blog/models.py"], "/contactus/serializers.py": ["/contactus/models.py"], "/contactus/views.py": ["/contactus/models.py", "/contactus/utils.py", "/contactus/serializers.py"], "/api/serializers.py": ["/api/models.py", "/customauth/models.py"], "/customauth/serializers.py": ["/customauth/models.py"], "/customauth/urls.py": ["/customauth/views.py"], "/api/views.py": ["/api/serializers.py", "/api/models.py", "/api/views_utils.py", "/api/utils.py", "/customauth/models.py", "/config/settings.py"], "/blog/admin.py": ["/blog/models.py"], "/api/urls.py": ["/api/views.py"], "/contactus/models.py": ["/contactus/utils.py"], "/api/models.py": ["/api/utils.py"], "/api/admin.py": ["/api/models.py"], "/blog/models.py": ["/api/utils.py", "/blog/utils.py"], "/customauth/views.py": ["/customauth/models.py", "/customauth/serializers.py"], "/blog/urls.py": ["/blog/views.py"], "/customauth/admin.py": ["/customauth/models.py"]}
|
10,955
|
F4R4N/film-review
|
refs/heads/main
|
/contactus/utils.py
|
import string
import random
def get_client_ip(request):
    """Return the originating client IP for *request*.

    Prefers the first address in the X-Forwarded-For header (set by
    proxies); falls back to the socket-level REMOTE_ADDR.
    """
    forwarded = request.META.get('HTTP_X_FORWARDED_FOR')
    if forwarded:
        return forwarded.split(',')[0]
    return request.META.get('REMOTE_ADDR')
def random_key():
    """Return a 10-character key: a random permutation of the digits 0-9."""
    # sample() without replacement over all ten digits == a shuffled digit string
    return "".join(random.sample(list(string.digits), 10))
|
{"/contactus/urls.py": ["/contactus/views.py"], "/blog/views.py": ["/blog/models.py", "/api/models.py", "/blog/serializers.py", "/blog/utils.py"], "/api/views_utils.py": ["/api/models.py"], "/customauth/models.py": ["/api/models.py", "/api/utils.py"], "/blog/serializers.py": ["/blog/models.py"], "/contactus/serializers.py": ["/contactus/models.py"], "/contactus/views.py": ["/contactus/models.py", "/contactus/utils.py", "/contactus/serializers.py"], "/api/serializers.py": ["/api/models.py", "/customauth/models.py"], "/customauth/serializers.py": ["/customauth/models.py"], "/customauth/urls.py": ["/customauth/views.py"], "/api/views.py": ["/api/serializers.py", "/api/models.py", "/api/views_utils.py", "/api/utils.py", "/customauth/models.py", "/config/settings.py"], "/blog/admin.py": ["/blog/models.py"], "/api/urls.py": ["/api/views.py"], "/contactus/models.py": ["/contactus/utils.py"], "/api/models.py": ["/api/utils.py"], "/api/admin.py": ["/api/models.py"], "/blog/models.py": ["/api/utils.py", "/blog/utils.py"], "/customauth/views.py": ["/customauth/models.py", "/customauth/serializers.py"], "/blog/urls.py": ["/blog/views.py"], "/customauth/admin.py": ["/customauth/models.py"]}
|
10,956
|
F4R4N/film-review
|
refs/heads/main
|
/customauth/models.py
|
from django.db import models
from django.contrib.auth.models import User
from api.models import Group
from api.utils import profile_image, random_key
class Profile(models.Model):
    """Per-user profile: random public key, avatar image and group memberships."""
    # Random public identifier (api.utils.random_key yields 15 chars; field allows 16).
    key = models.CharField(
        max_length=16, default=random_key, unique=True, blank=False, null=False)
    # Exactly one profile per auth user, reachable as user.profile.
    user = models.OneToOneField(
        User, on_delete=models.CASCADE, related_name='profile')
    # Avatar; path built by api.utils.profile_image, with a bundled fallback.
    image = models.ImageField(
        upload_to=profile_image, default="profile/default/default.png")
    # Groups (api.models.Group) this user belongs to.
    group = models.ManyToManyField(Group, blank=True)

    def __str__(self):
        return self.user.username
|
{"/contactus/urls.py": ["/contactus/views.py"], "/blog/views.py": ["/blog/models.py", "/api/models.py", "/blog/serializers.py", "/blog/utils.py"], "/api/views_utils.py": ["/api/models.py"], "/customauth/models.py": ["/api/models.py", "/api/utils.py"], "/blog/serializers.py": ["/blog/models.py"], "/contactus/serializers.py": ["/contactus/models.py"], "/contactus/views.py": ["/contactus/models.py", "/contactus/utils.py", "/contactus/serializers.py"], "/api/serializers.py": ["/api/models.py", "/customauth/models.py"], "/customauth/serializers.py": ["/customauth/models.py"], "/customauth/urls.py": ["/customauth/views.py"], "/api/views.py": ["/api/serializers.py", "/api/models.py", "/api/views_utils.py", "/api/utils.py", "/customauth/models.py", "/config/settings.py"], "/blog/admin.py": ["/blog/models.py"], "/api/urls.py": ["/api/views.py"], "/contactus/models.py": ["/contactus/utils.py"], "/api/models.py": ["/api/utils.py"], "/api/admin.py": ["/api/models.py"], "/blog/models.py": ["/api/utils.py", "/blog/utils.py"], "/customauth/views.py": ["/customauth/models.py", "/customauth/serializers.py"], "/blog/urls.py": ["/blog/views.py"], "/customauth/admin.py": ["/customauth/models.py"]}
|
10,957
|
F4R4N/film-review
|
refs/heads/main
|
/blog/serializers.py
|
from rest_framework import serializers
from .models import Post, Tag, Comment
from django.contrib.auth.models import User
class TagSerializer(serializers.ModelSerializer):
    """Serialize a blog Tag as its name and slug."""
    class Meta:
        model = Tag
        fields = ("name", "slug")
class AuthorSerializer(serializers.ModelSerializer):
    """
    Minimal author representation used inside PostSerializer and
    CommentSerializer: only the profile key and the username.
    """
    # Expose the related profile's key as the author's public identifier.
    key = serializers.CharField(source='profile.key')

    class Meta:
        model = User
        fields = ("key", "username")
class DemoPostSerializer(serializers.ModelSerializer):
    """
    Lightweight post preview for listings. Only a small subset of fields is
    exposed here; the full post (which increments the visit counter) is
    fetched separately via its key, so listing pages do not inflate visits.
    """
    author = AuthorSerializer()

    class Meta:
        model = Post
        fields = ("key", "title", "created", "author", )
class CommentSerializer(serializers.ModelSerializer):
    """Serialize a comment together with its author's minimal public data."""
    author = AuthorSerializer()

    class Meta:
        model = Comment
        fields = ("key", "author", "body", "created")
class PostSerializer(serializers.ModelSerializer):
    """
    Full post representation shown when a user opens a post from its listing
    link. Tags, author and comments are database relations, so their own
    serializers render the related data. 'comments' is the reverse relation
    (default name comment_set, renamed to comments on the Comment model).
    """
    tags = TagSerializer(many=True)
    author = AuthorSerializer()
    comments = CommentSerializer(many=True)

    class Meta:
        model = Post
        fields = (
            "key", "title", "body", "author", "tags", "image", "visibility", "created",
            "updated", "visits", "comments")
|
{"/contactus/urls.py": ["/contactus/views.py"], "/blog/views.py": ["/blog/models.py", "/api/models.py", "/blog/serializers.py", "/blog/utils.py"], "/api/views_utils.py": ["/api/models.py"], "/customauth/models.py": ["/api/models.py", "/api/utils.py"], "/blog/serializers.py": ["/blog/models.py"], "/contactus/serializers.py": ["/contactus/models.py"], "/contactus/views.py": ["/contactus/models.py", "/contactus/utils.py", "/contactus/serializers.py"], "/api/serializers.py": ["/api/models.py", "/customauth/models.py"], "/customauth/serializers.py": ["/customauth/models.py"], "/customauth/urls.py": ["/customauth/views.py"], "/api/views.py": ["/api/serializers.py", "/api/models.py", "/api/views_utils.py", "/api/utils.py", "/customauth/models.py", "/config/settings.py"], "/blog/admin.py": ["/blog/models.py"], "/api/urls.py": ["/api/views.py"], "/contactus/models.py": ["/contactus/utils.py"], "/api/models.py": ["/api/utils.py"], "/api/admin.py": ["/api/models.py"], "/blog/models.py": ["/api/utils.py", "/blog/utils.py"], "/customauth/views.py": ["/customauth/models.py", "/customauth/serializers.py"], "/blog/urls.py": ["/blog/views.py"], "/customauth/admin.py": ["/customauth/models.py"]}
|
10,958
|
F4R4N/film-review
|
refs/heads/main
|
/contactus/serializers.py
|
from rest_framework import serializers
from .models import Contact
class ContactSerializer(serializers.ModelSerializer):
    """Full contact-form submission, including admin-facing fields (ip, is_readed)."""
    class Meta:
        model = Contact
        fields = (
            "key", "name", "email", "subject", "text", "phone_number", "address",
            "datetime", "ip", "is_readed")
|
{"/contactus/urls.py": ["/contactus/views.py"], "/blog/views.py": ["/blog/models.py", "/api/models.py", "/blog/serializers.py", "/blog/utils.py"], "/api/views_utils.py": ["/api/models.py"], "/customauth/models.py": ["/api/models.py", "/api/utils.py"], "/blog/serializers.py": ["/blog/models.py"], "/contactus/serializers.py": ["/contactus/models.py"], "/contactus/views.py": ["/contactus/models.py", "/contactus/utils.py", "/contactus/serializers.py"], "/api/serializers.py": ["/api/models.py", "/customauth/models.py"], "/customauth/serializers.py": ["/customauth/models.py"], "/customauth/urls.py": ["/customauth/views.py"], "/api/views.py": ["/api/serializers.py", "/api/models.py", "/api/views_utils.py", "/api/utils.py", "/customauth/models.py", "/config/settings.py"], "/blog/admin.py": ["/blog/models.py"], "/api/urls.py": ["/api/views.py"], "/contactus/models.py": ["/contactus/utils.py"], "/api/models.py": ["/api/utils.py"], "/api/admin.py": ["/api/models.py"], "/blog/models.py": ["/api/utils.py", "/blog/utils.py"], "/customauth/views.py": ["/customauth/models.py", "/customauth/serializers.py"], "/blog/urls.py": ["/blog/views.py"], "/customauth/admin.py": ["/customauth/models.py"]}
|
10,959
|
F4R4N/film-review
|
refs/heads/main
|
/api/utils.py
|
import random
import string
from datetime import datetime
def profile_image(instance, filename):
    """Storage path for a profile image: profile/<username>/<username>-<timestamp>.jpg

    *filename* is ignored (Django's upload_to callback signature requires it);
    the extension is always forced to .jpg.
    """
    username = instance.user.username
    stamp = datetime.now().strftime("%Y_%m_%d,%H:%M:%S")
    stored_name = "{0}-{1}.jpg".format(username, stamp)
    return 'profile/{0}/{1}'.format(username, stored_name)
def group_image(instance, filename):
    """Storage path for a group image: group/<name>/<name>-<timestamp>.jpg

    *filename* is ignored; the extension is always forced to .jpg.
    """
    stamp = datetime.now().strftime("%Y_%m_%d,%H:%M:%S")
    stored_name = "{0}-{1}.jpg".format(instance.name, stamp)
    return 'group/{0}/{1}'.format(instance.name, stored_name)
def random_key():
    """Return a 15-character key of distinct lowercase letters and digits.

    random.sample draws without replacement, so no character repeats.
    """
    pool = list(string.ascii_lowercase) + list(string.digits)
    return "".join(random.sample(pool, 15))
def invite_code():
    """Return an invite code: 'FILMMEETING' + 15 distinct lowercase letters."""
    suffix = "".join(random.sample(list(string.ascii_lowercase), 15))
    return "FILMMEETING" + suffix
|
{"/contactus/urls.py": ["/contactus/views.py"], "/blog/views.py": ["/blog/models.py", "/api/models.py", "/blog/serializers.py", "/blog/utils.py"], "/api/views_utils.py": ["/api/models.py"], "/customauth/models.py": ["/api/models.py", "/api/utils.py"], "/blog/serializers.py": ["/blog/models.py"], "/contactus/serializers.py": ["/contactus/models.py"], "/contactus/views.py": ["/contactus/models.py", "/contactus/utils.py", "/contactus/serializers.py"], "/api/serializers.py": ["/api/models.py", "/customauth/models.py"], "/customauth/serializers.py": ["/customauth/models.py"], "/customauth/urls.py": ["/customauth/views.py"], "/api/views.py": ["/api/serializers.py", "/api/models.py", "/api/views_utils.py", "/api/utils.py", "/customauth/models.py", "/config/settings.py"], "/blog/admin.py": ["/blog/models.py"], "/api/urls.py": ["/api/views.py"], "/contactus/models.py": ["/contactus/utils.py"], "/api/models.py": ["/api/utils.py"], "/api/admin.py": ["/api/models.py"], "/blog/models.py": ["/api/utils.py", "/blog/utils.py"], "/customauth/views.py": ["/customauth/models.py", "/customauth/serializers.py"], "/blog/urls.py": ["/blog/views.py"], "/customauth/admin.py": ["/customauth/models.py"]}
|
10,960
|
F4R4N/film-review
|
refs/heads/main
|
/contactus/views.py
|
from rest_framework import permissions, status
from rest_framework.response import Response
from rest_framework.views import APIView
from django.core.mail import EmailMessage
from django.shortcuts import get_object_or_404
from django.contrib.auth.models import User
from .models import Contact, MassEmail
from .utils import get_client_ip
from .serializers import ContactSerializer
from . import settings_loader
class CreateForm(APIView):
    """
    Public contact-form endpoint: records a Contact and, when enabled in the
    settings, e-mails an acknowledgement to the sender.
    """
    permission_classes = (permissions.AllowAny, )

    def post(self, request, format=None):
        # 'name', 'email' and 'text' are mandatory; everything else is optional.
        for required in ["name", "email", "text"]:
            if required not in request.data:
                return Response(
                    status=status.HTTP_400_BAD_REQUEST,
                    data={"detail": "{} is required".format(required)})
        contact = Contact.objects.create(
            name=request.data["name"],
            email=request.data["email"],
            text=request.data["text"]
        )
        contact.ip = get_client_ip(request)
        # Copy over whichever optional fields the client supplied.
        for optional in ("subject", "phone_number", "address"):
            if optional in request.data:
                setattr(contact, optional, request.data[optional])
        contact.save()
        if settings_loader.DEFAULT_CONTACT_US_SETTINGS["SEND_MAIL"]:
            # First superuser/staff first name signs the acknowledgement mail.
            admin_name = list(User.objects.filter(is_superuser=True, is_staff=True).values_list("first_name", flat=True))[0]
            mail_subject = settings_loader.DEFAULT_CONTACT_US_SETTINGS["APP_NAME"] + settings_loader.DEFAULT_CONTACT_US_SETTINGS["MAIL_SUBJECT"]
            message = "dear, " + contact.name + settings_loader.DEFAULT_CONTACT_US_SETTINGS["MESSAGE"] + admin_name
            EmailMessage(mail_subject, message, to=[contact.email]).send()
        return Response(status=status.HTTP_200_OK, data={"detail": "form created."})
class AdminContactReader(APIView):
    """Admin-only listing of contact forms, optionally narrowed to a year/month."""
    permission_classes = (permissions.IsAdminUser, )

    def get(self, request, year=None, month=None, format=None):
        # When both URL parts are supplied, filter by submission year/month;
        # otherwise return every form.
        if all([year, month]):
            forms = Contact.objects.filter(datetime__year=year, datetime__month=month)
        else:
            forms = Contact.objects.all()
        serializer = ContactSerializer(instance=forms, many=True)
        return Response(status=status.HTTP_200_OK, data=serializer.data)
class AdminEditIsReaded(APIView):
    """Admin-only update of a contact form's is_readed flag."""
    permission_classes = (permissions.IsAdminUser, )

    def put(self, request, key, format=None):
        # 404 on unknown key first, then validate the payload.
        contact = get_object_or_404(Contact, key=key)
        if "is_readed" not in request.data:
            return Response(
                status=status.HTTP_400_BAD_REQUEST,
                data={"detail": "'is_readed' field not provided."})
        contact.is_readed = request.data["is_readed"]
        contact.save()
        return Response(status=status.HTTP_200_OK, data={"detail": "updated"})
class AdminSendMassEmail(APIView):
    """
    Admin-only bulk mail-out: records a MassEmail and sends it to every
    address that ever submitted a contact form.
    """
    permission_classes = (permissions.IsAdminUser, )

    def post(self, request, format=None):
        # BUG FIX: the original tested ``field not in request`` (the request
        # object itself, not its payload), so missing fields were never caught
        # and fell through to a KeyError below. Validate request.data instead.
        for field in ["subject", "text"]:
            if field not in request.data:
                return Response(
                    status=status.HTTP_400_BAD_REQUEST,
                    data={"detail": "field '{0}' not provided.".format(field)})
        email = MassEmail.objects.create(subject=request.data["subject"], text=request.data["text"])
        if "name" in request.data:
            email.name = request.data["name"]
        email.save()
        mail_subject = request.data["subject"]
        message = request.data["text"]
        all_emails = list(Contact.objects.all().values_list("email", flat=True))
        # NOTE(review): every recipient is placed in To:, exposing all
        # addresses to each other -- consider bcc= instead (behavior kept).
        EmailMessage(mail_subject, message, to=all_emails).send()
        return Response(status=status.HTTP_200_OK, data={"detail": "mass email sent.", "emails": all_emails})
|
{"/contactus/urls.py": ["/contactus/views.py"], "/blog/views.py": ["/blog/models.py", "/api/models.py", "/blog/serializers.py", "/blog/utils.py"], "/api/views_utils.py": ["/api/models.py"], "/customauth/models.py": ["/api/models.py", "/api/utils.py"], "/blog/serializers.py": ["/blog/models.py"], "/contactus/serializers.py": ["/contactus/models.py"], "/contactus/views.py": ["/contactus/models.py", "/contactus/utils.py", "/contactus/serializers.py"], "/api/serializers.py": ["/api/models.py", "/customauth/models.py"], "/customauth/serializers.py": ["/customauth/models.py"], "/customauth/urls.py": ["/customauth/views.py"], "/api/views.py": ["/api/serializers.py", "/api/models.py", "/api/views_utils.py", "/api/utils.py", "/customauth/models.py", "/config/settings.py"], "/blog/admin.py": ["/blog/models.py"], "/api/urls.py": ["/api/views.py"], "/contactus/models.py": ["/contactus/utils.py"], "/api/models.py": ["/api/utils.py"], "/api/admin.py": ["/api/models.py"], "/blog/models.py": ["/api/utils.py", "/blog/utils.py"], "/customauth/views.py": ["/customauth/models.py", "/customauth/serializers.py"], "/blog/urls.py": ["/blog/views.py"], "/customauth/admin.py": ["/customauth/models.py"]}
|
10,961
|
F4R4N/film-review
|
refs/heads/main
|
/api/serializers.py
|
from rest_framework import serializers
from .models import Group, Movie
from django.contrib.auth.models import User
from customauth.models import Profile
class AdminSerializer(serializers.ModelSerializer):
    """
    Base for the admin/user fields in MovieSerializer and GroupSerializer:
    retrieves only the username.
    model = User
    """
    class Meta:
        model = User
        fields = ("username", )
class MovieSerializer(serializers.ModelSerializer):
    """
    Serialize a movie including its owner's username (via AdminSerializer).
    model = Movie
    """
    user = AdminSerializer(read_only=True, many=False)

    class Meta:
        model = Movie
        fields = (
            "key", "name", "description", "user", "year", "imdb_rate", "watched",
            "download_link", "poster_link")
class MovieProfileSerializer(serializers.ModelSerializer):
    """
    Like MovieSerializer but without the user field (and with review).
    Used as the nested movie list in MemberSerializer, i.e. when showing
    each member's movies to the rest of the group.
    model = Movie
    """
    class Meta:
        model = Movie
        fields = (
            "key", "name", "description", "year", "imdb_rate", "watched",
            "download_link", "poster_link", "review")
class MemberSerializer(serializers.ModelSerializer):
    """
    Serialize a member's username together with their movies.
    model = User
    """
    # Reverse relation named 'movie' on User, exposed here as 'movies'.
    movies = MovieProfileSerializer(many=True, source='movie')

    class Meta:
        model = User
        fields = ("username", "movies")
class GroupSerializer(serializers.ModelSerializer):
    """
    Serialize group information; AdminSerializer supplies the admin's
    username through the relation.
    model = Group
    """
    admin = AdminSerializer(read_only=True, many=False)

    class Meta:
        model = Group
        fields = (
            "key", "name", "movie_of_the_week", "admin", "image", "meeting_detail")
class GroupMemberSerializer(serializers.ModelSerializer):
    """
    Serialize a group member's profile: key, image, and (via MemberSerializer)
    username plus movies.
    """
    user = MemberSerializer(read_only=True)

    class Meta:
        model = Profile
        fields = ("key", "image", "user")
|
{"/contactus/urls.py": ["/contactus/views.py"], "/blog/views.py": ["/blog/models.py", "/api/models.py", "/blog/serializers.py", "/blog/utils.py"], "/api/views_utils.py": ["/api/models.py"], "/customauth/models.py": ["/api/models.py", "/api/utils.py"], "/blog/serializers.py": ["/blog/models.py"], "/contactus/serializers.py": ["/contactus/models.py"], "/contactus/views.py": ["/contactus/models.py", "/contactus/utils.py", "/contactus/serializers.py"], "/api/serializers.py": ["/api/models.py", "/customauth/models.py"], "/customauth/serializers.py": ["/customauth/models.py"], "/customauth/urls.py": ["/customauth/views.py"], "/api/views.py": ["/api/serializers.py", "/api/models.py", "/api/views_utils.py", "/api/utils.py", "/customauth/models.py", "/config/settings.py"], "/blog/admin.py": ["/blog/models.py"], "/api/urls.py": ["/api/views.py"], "/contactus/models.py": ["/contactus/utils.py"], "/api/models.py": ["/api/utils.py"], "/api/admin.py": ["/api/models.py"], "/blog/models.py": ["/api/utils.py", "/blog/utils.py"], "/customauth/views.py": ["/customauth/models.py", "/customauth/serializers.py"], "/blog/urls.py": ["/blog/views.py"], "/customauth/admin.py": ["/customauth/models.py"]}
|
10,962
|
F4R4N/film-review
|
refs/heads/main
|
/customauth/serializers.py
|
from rest_framework import serializers
from rest_framework.validators import UniqueValidator
from django.contrib.auth.models import User
from django.contrib.auth.password_validation import validate_password
from .models import Profile
from rest_framework_simplejwt.tokens import RefreshToken
from rest_framework_simplejwt.serializers import TokenObtainSerializer
class RegisterSerializer(serializers.ModelSerializer):
    """
    Signup serializer: enforces a unique e-mail and two matching strong
    passwords, then creates an inactive User together with its Profile.
    """
    email = serializers.EmailField(
        required=True,
        validators=[UniqueValidator(queryset=User.objects.all())])
    password1 = serializers.CharField(
        write_only=True, required=True, validators=[validate_password])
    password2 = serializers.CharField(
        write_only=True, required=True, validators=[validate_password])

    class Meta:
        model = User
        fields = (
            'username', 'password1', 'password2', 'email', 'first_name',
            'last_name')
        extra_kwargs = {
            'first_name': {'required': False},
            'last_name': {'required': False},
        }

    def validate(self, attrs):
        """Reject the payload when the two password fields differ."""
        if attrs['password1'] != attrs['password2']:
            raise serializers.ValidationError(
                {'password1': "password field dont match !"})
        return attrs

    def create(self, validated_data):
        """Create an inactive user and its profile; returns the User."""
        user = User.objects.create(
            username=validated_data['username'],
            email=validated_data['email'],
        )
        # dict.get() with a default replaces the original if/else pairs
        # for the two optional name fields.
        user.first_name = validated_data.get('first_name', "")
        user.last_name = validated_data.get('last_name', "")
        # Account stays inactive until it is confirmed elsewhere.
        user.is_active = False
        user.set_password(validated_data['password1'])
        # objects.create() already persists the profile; the original's
        # extra profile.save() was redundant and has been dropped.
        Profile.objects.create(user=user)
        user.save()
        return user
class UserProfileSerializer(serializers.ModelSerializer):
    """Serialize a user's public profile: key, names, e-mail and avatar."""
    # Both image and key live on the related Profile, not on User itself.
    image = serializers.ImageField(source='profile.image')
    key = serializers.CharField(source='profile.key')

    class Meta:
        model = User
        fields = (
            "key", "username", "email", "first_name", "last_name", "image")
class UserLoginSerializer(TokenObtainSerializer):
    """Login serializer returning a JWT token pair plus a user summary."""
    @classmethod
    def get_token(cls, user):
        # Issue a fresh refresh token; the access token is derived from it.
        return RefreshToken.for_user(user)

    def validate(self, attrs):
        # Parent class authenticates the credentials and sets self.user.
        data = super().validate(attrs)
        refresh = self.get_token(self.user)
        data['tokens'] = {
            'refresh': str(refresh),
            "access": str(refresh.access_token)}
        # Bundle the basic profile so clients skip a follow-up request.
        data['user'] = {
            'key': self.user.profile.key,
            'username': self.user.username,
            'email': self.user.email,
            'first_name': self.user.first_name,
            'last_name': self.user.last_name,
            'image': self.user.profile.image.url}
        return data
|
{"/contactus/urls.py": ["/contactus/views.py"], "/blog/views.py": ["/blog/models.py", "/api/models.py", "/blog/serializers.py", "/blog/utils.py"], "/api/views_utils.py": ["/api/models.py"], "/customauth/models.py": ["/api/models.py", "/api/utils.py"], "/blog/serializers.py": ["/blog/models.py"], "/contactus/serializers.py": ["/contactus/models.py"], "/contactus/views.py": ["/contactus/models.py", "/contactus/utils.py", "/contactus/serializers.py"], "/api/serializers.py": ["/api/models.py", "/customauth/models.py"], "/customauth/serializers.py": ["/customauth/models.py"], "/customauth/urls.py": ["/customauth/views.py"], "/api/views.py": ["/api/serializers.py", "/api/models.py", "/api/views_utils.py", "/api/utils.py", "/customauth/models.py", "/config/settings.py"], "/blog/admin.py": ["/blog/models.py"], "/api/urls.py": ["/api/views.py"], "/contactus/models.py": ["/contactus/utils.py"], "/api/models.py": ["/api/utils.py"], "/api/admin.py": ["/api/models.py"], "/blog/models.py": ["/api/utils.py", "/blog/utils.py"], "/customauth/views.py": ["/customauth/models.py", "/customauth/serializers.py"], "/blog/urls.py": ["/blog/views.py"], "/customauth/admin.py": ["/customauth/models.py"]}
|
10,963
|
F4R4N/film-review
|
refs/heads/main
|
/customauth/urls.py
|
from django.urls import path
from .views import (
RegisterView, ChangePasswordView, UpdateProfileView, LogoutView,
UpdateUserImageView, DeleteProfileView, ForgotPasswordView,
ValidateConfirmationCodeView, ResetPasswordView, UserLoginView,
GetUserProfile)
from rest_framework_simplejwt.views import TokenRefreshView
app_name = "customauth"

# JWT auth plus account-management endpoints. Object keys (profile key) are
# embedded directly in the path for the dashboard routes.
urlpatterns = [
    path('register/', RegisterView.as_view(), name='auth_register'),
    path('login/', UserLoginView.as_view(), name='token_obtain_pair'),
    path('login/refresh/', TokenRefreshView.as_view(), name='token_refresh'),
    path('dashboard/profile/', GetUserProfile.as_view()),
    path('dashboard/change_password/<key>/', ChangePasswordView.as_view(), name='auth_change_password'),
    path('dashboard/update_profile/<key>/', UpdateProfileView.as_view(), name='auth_update_profile'),
    path('dashboard/change_image/<key>/', UpdateUserImageView.as_view(), name='auth_image'),
    path('dashboard/logout/', LogoutView.as_view(), name='auth_logout'),
    path('dashboard/delete_profile/<key>/', DeleteProfileView.as_view(), name='auth_delete_profile'),
    # Forgot-password flow: request code -> confirm code -> reset.
    path('forgot_password/', ForgotPasswordView.as_view(), name='auth_forgot_password'),
    path('confirm/', ValidateConfirmationCodeView.as_view(), name='auth_confirm'),
    path('reset_password/<key>/', ResetPasswordView.as_view(), name='auth_reset_password')
]
|
{"/contactus/urls.py": ["/contactus/views.py"], "/blog/views.py": ["/blog/models.py", "/api/models.py", "/blog/serializers.py", "/blog/utils.py"], "/api/views_utils.py": ["/api/models.py"], "/customauth/models.py": ["/api/models.py", "/api/utils.py"], "/blog/serializers.py": ["/blog/models.py"], "/contactus/serializers.py": ["/contactus/models.py"], "/contactus/views.py": ["/contactus/models.py", "/contactus/utils.py", "/contactus/serializers.py"], "/api/serializers.py": ["/api/models.py", "/customauth/models.py"], "/customauth/serializers.py": ["/customauth/models.py"], "/customauth/urls.py": ["/customauth/views.py"], "/api/views.py": ["/api/serializers.py", "/api/models.py", "/api/views_utils.py", "/api/utils.py", "/customauth/models.py", "/config/settings.py"], "/blog/admin.py": ["/blog/models.py"], "/api/urls.py": ["/api/views.py"], "/contactus/models.py": ["/contactus/utils.py"], "/api/models.py": ["/api/utils.py"], "/api/admin.py": ["/api/models.py"], "/blog/models.py": ["/api/utils.py", "/blog/utils.py"], "/customauth/views.py": ["/customauth/models.py", "/customauth/serializers.py"], "/blog/urls.py": ["/blog/views.py"], "/customauth/admin.py": ["/customauth/models.py"]}
|
10,964
|
F4R4N/film-review
|
refs/heads/main
|
/blog/utils.py
|
import datetime
from rest_framework import pagination
def post_images(instance, filename):
    """Build the storage path for a post image.

    The file is stored as
    ``profile/<post_author_username>/post/<timestamp>.jpg`` where the
    timestamp uses the ``%y-%m-%d.%H-%M-%S`` format.
    """
    timestamp = datetime.datetime.now().strftime("%y-%m-%d.%H-%M-%S")
    return "profile/{0}/post/{1}.jpg".format(instance.author.username, timestamp)
class CustomPaginator(pagination.PageNumberPagination):
    """
    paginator object used for posts/all/ endpoint so that all of the data wont
    load at once and load them page by page. page_size is 30 now and can access
    next page by passing page parameter in the url like : posts/all/?page=2
    """
    # Upper bound a client may request per page (when page size is client-set).
    max_page_size = 100
    # Default number of posts returned per page.
    page_size = 30
    # Query-string parameter selecting the page, e.g. ?page=2
    page_query_param = "page"
|
{"/contactus/urls.py": ["/contactus/views.py"], "/blog/views.py": ["/blog/models.py", "/api/models.py", "/blog/serializers.py", "/blog/utils.py"], "/api/views_utils.py": ["/api/models.py"], "/customauth/models.py": ["/api/models.py", "/api/utils.py"], "/blog/serializers.py": ["/blog/models.py"], "/contactus/serializers.py": ["/contactus/models.py"], "/contactus/views.py": ["/contactus/models.py", "/contactus/utils.py", "/contactus/serializers.py"], "/api/serializers.py": ["/api/models.py", "/customauth/models.py"], "/customauth/serializers.py": ["/customauth/models.py"], "/customauth/urls.py": ["/customauth/views.py"], "/api/views.py": ["/api/serializers.py", "/api/models.py", "/api/views_utils.py", "/api/utils.py", "/customauth/models.py", "/config/settings.py"], "/blog/admin.py": ["/blog/models.py"], "/api/urls.py": ["/api/views.py"], "/contactus/models.py": ["/contactus/utils.py"], "/api/models.py": ["/api/utils.py"], "/api/admin.py": ["/api/models.py"], "/blog/models.py": ["/api/utils.py", "/blog/utils.py"], "/customauth/views.py": ["/customauth/models.py", "/customauth/serializers.py"], "/blog/urls.py": ["/blog/views.py"], "/customauth/admin.py": ["/customauth/models.py"]}
|
10,965
|
F4R4N/film-review
|
refs/heads/main
|
/api/views.py
|
from rest_framework import permissions, status
from rest_framework.views import APIView
from rest_framework.response import Response
from django.shortcuts import get_object_or_404
from .serializers import (
GroupSerializer, MovieSerializer, GroupMemberSerializer
)
from .models import Group, Movie
from .views_utils import (
all_movies_in_group, have_permission_for_group, is_admin_user
)
from .utils import invite_code
from customauth.models import Profile
from config.settings import MOVIE_PER_USER
import random
class CreateGroupView(APIView):
    """ create group allowed for every authenticated user. get field ['name']. """
    permission_classes = (permissions.IsAuthenticated, )

    def post(self, request, format=None):
        """Create a new group owned (admin'd) by the requesting user.

        Required field: ``name`` (str).  Optional field: ``image``.
        Responses: 400 when ``name`` is missing or already taken,
        201 with the serialized group on success.  The creator's profile is
        automatically added as a member.
        """
        payload = request.data
        if "name" not in payload:
            return Response(
                status=status.HTTP_400_BAD_REQUEST,
                data={"detail": "name is not in request."})
        if Group.objects.filter(name=payload["name"]).exists():
            return Response(
                status=status.HTTP_400_BAD_REQUEST,
                data={"detail": "group name already exisit."})
        new_group = Group.objects.create(name=payload["name"], admin=request.user)
        if "image" in payload:
            new_group.image = payload["image"]
        new_group.save()
        # The creator becomes a member of the group right away.
        request.user.profile.group.add(new_group)
        serialized = GroupSerializer(instance=new_group)
        return Response(
            status=status.HTTP_201_CREATED,
            data={"detail": "group '{0}' added.".format(new_group.name),
                  "data": serialized.data})
class EditAndDeleteGroupView(APIView):
    """ admin only with put 'name' and 'users' , users is an array of users key. and group key should include in url. """
    permission_classes = (permissions.IsAuthenticated, )

    def put(self, request, group_key, format=None):
        """Update a group's name/image and optionally remove members.

        Only the group admin may call this.  Optional ``request.data`` fields:

        * ``name`` (str) -- new group name
        * ``image`` (image) -- new group image
        * ``users`` (list of profile keys) -- NOTE: each listed profile is
          REMOVED from the group (joining happens via the invite-code flow).

        Responses: 403 when the caller is not the admin, 400 when no data is
        provided, 404 when ``group_key`` matches no group, 200 on success.
        """
        user = request.user
        if not is_admin_user(group_key, user):
            return Response(
                status=status.HTTP_403_FORBIDDEN,
                data={"detail": "you dont have permission to perform this action."})
        if len(request.data) == 0:
            return Response(
                status=status.HTTP_400_BAD_REQUEST,
                data={"detail": "no new data provided."})
        group = get_object_or_404(Group, key=group_key)
        if "name" in request.data:
            group.name = request.data["name"]
        if "image" in request.data:
            group.image = request.data["image"]
        group.save()
        if "users" in request.data:
            # Fix: the original loop variable shadowed the authenticated
            # ``user``; use a distinct name for the profile keys.
            for member_key in request.data["users"]:
                profile = get_object_or_404(Profile, key=member_key)
                profile.group.remove(group)
        return Response(status=status.HTTP_200_OK, data={"detail": "modified"})

    def delete(self, request, group_key, format=None):
        """Delete the group identified by ``group_key``; admin only.

        Responses: 403 when the caller is not the admin, 404 when the key
        matches no group, 200 with a confirmation message on success.
        """
        user = request.user
        if not is_admin_user(group_key, user):
            return Response(
                status=status.HTTP_403_FORBIDDEN,
                data={"detail": "you dont have permission to perform this action."})
        group = get_object_or_404(Group, key=group_key)
        group.delete()
        return Response(
            status=status.HTTP_200_OK,
            data={"detail": "group '{0}' deleted.".format(group.name)})
class CreateAndGetMovieView(APIView):
    """ on post add movie can include fields ["name", "description", "year", "imdb_rate", "download_link", "poster_link", "review"] and name is required. on get return all movies of user. """
    permission_classes = (permissions.IsAuthenticated, )

    # Optional movie attributes a POST may supply in addition to "name".
    _OPTIONAL_FIELDS = (
        "description", "year", "imdb_rate", "download_link", "poster_link",
        "review")

    def post(self, request, format=None):
        """Create a movie for the requesting user.

        ``name`` (str) is required; any of the ``_OPTIONAL_FIELDS`` may also
        be supplied.  Responses: 406 when the per-user movie limit
        (``MOVIE_PER_USER``) is reached, 400 when ``name`` is missing or the
        user already has a movie with that name, 201 on success.
        TODO:Frontend: check for poster_link and download_link to be valid url
        """
        user = request.user
        if Movie.objects.filter(user=user).count() >= MOVIE_PER_USER:
            return Response(
                status=status.HTTP_406_NOT_ACCEPTABLE,
                data={"detail": "you reached the limit of adding movie. limit:{0}".format(MOVIE_PER_USER)})
        if "name" not in request.data:
            return Response(
                status=status.HTTP_400_BAD_REQUEST, data={"detail": "'name' is required."})
        if Movie.objects.filter(user=user, name=request.data["name"]).exists():
            return Response(
                status=status.HTTP_400_BAD_REQUEST,
                data={"detail": "movie with this name already exists."})
        movie = Movie.objects.create(name=request.data["name"], user=user)
        # Copy over whichever optional attributes the client provided.
        for field in self._OPTIONAL_FIELDS:
            if field in request.data:
                setattr(movie, field, request.data[field])
        movie.save()
        return Response(
            status=status.HTTP_201_CREATED,
            data={"detail": "movie '{0}' created".format(movie.name)})

    def get(self, request, format=None):
        """Return every movie belonging to the requesting user, serialized (200)."""
        serializer = MovieSerializer(instance=request.user.movie.all(), many=True)
        return Response(status=status.HTTP_200_OK, data=serializer.data)
class EditAndDeleteMovieView(APIView):
    """ for both edit and delete should include movie key. put can include fields ["name", "description", "year", "imdb_rate", "watched", "download_link", "poster_link", "review"] """
    permission_classes = (permissions.IsAuthenticated, )

    # Attributes a PUT request may overwrite.
    _EDITABLE_FIELDS = (
        "name", "description", "year", "imdb_rate", "watched",
        "download_link", "poster_link", "review")

    def put(self, request, key, format=None):
        """Update the movie identified by ``key``; owner only.

        At least one editable field must be present in ``request.data``.
        Responses: 404 unknown key, 401 when the caller does not own the
        movie, 400 when no data is sent, 200 on success.
        """
        movie = get_object_or_404(Movie, key=key)
        if movie.user != request.user:
            return Response(
                status=status.HTTP_401_UNAUTHORIZED,
                data={"detail": "you dont have permission for this movie."})
        if len(request.data) == 0:
            return Response(
                status=status.HTTP_400_BAD_REQUEST,
                data={"detail": "no new data provided."})
        # Apply every editable attribute the client provided.
        for field in self._EDITABLE_FIELDS:
            if field in request.data:
                setattr(movie, field, request.data[field])
        movie.save()
        return Response(status=status.HTTP_200_OK, data={"detail": "updated"})

    def delete(self, request, key, format=None):
        """Delete the movie identified by ``key``; owner only.

        Responses: 404 unknown key, 403 when the caller does not own the
        movie, 200 with a confirmation message on success.
        """
        movie = get_object_or_404(Movie, key=key)
        if movie.user != request.user:
            return Response(
                status=status.HTTP_403_FORBIDDEN,
                data={"detail": "you dont have permission for this movie."})
        movie.delete()
        return Response(
            status=status.HTTP_200_OK,
            data={"detail": "movie '{0}' deleted.".format(movie.name)})
class GetRandomMovieView(APIView):
    """ every user that is group member can get this. should include group key in url. """
    permission_classes = (permissions.IsAuthenticated, )

    def get(self, request, key, format=None):
        """Pick a random movie from all movies owned by the group's members.

        ``key`` (in the url) identifies the group.  Responses: 401 when the
        caller is not a member, 404 when the group has no movies or the
        chosen movie no longer exists, 200 with the serialized movie.
        """
        user = request.user
        if not have_permission_for_group(key, user):
            return Response(
                status=status.HTTP_401_UNAUTHORIZED,
                data={"detail": "you dont have permission for this group."})
        all_group_movie_keys = all_movies_in_group(key)
        # Fix: random.choice raises IndexError (HTTP 500) on an empty
        # sequence; report a clean 404 when the group has no movies.
        if not all_group_movie_keys:
            return Response(
                status=status.HTTP_404_NOT_FOUND,
                data={"detail": "no movie found in this group."})
        selected_movie_key = random.choice(all_group_movie_keys)
        movie = get_object_or_404(Movie, key=selected_movie_key)
        serializer = MovieSerializer(instance=movie)
        return Response(status=status.HTTP_200_OK, data=serializer.data)
class SubmitMovieView(APIView):
    """ every user that is a group member can submit a movie. should pass group key and movie key in url. """
    permission_classes = (permissions.IsAuthenticated, )

    def get(self, request, group, movie, format=None):
        """Mark a movie as the group's movie of the week and flag it watched.

        ``group`` and ``movie`` are the group key and movie key from the url.
        Responses: 404 unknown group key, 401 when the caller is not a
        member, 400 when the movie does not belong to any group member,
        200 with a confirmation message on success.
        """
        group_obj = get_object_or_404(Group, key=group)
        if not have_permission_for_group(group, request.user):
            return Response(
                status=status.HTTP_401_UNAUTHORIZED,
                data={"detail": "you dont have permission for this group."})
        if movie not in all_movies_in_group(group):
            return Response(
                status=status.HTTP_400_BAD_REQUEST,
                data={"detail": "movie not found as one of the group members movie."})
        # Distinct name: ``movie`` (the url key) stays intact.
        selected = get_object_or_404(Movie, key=movie)
        group_obj.movie_of_the_week = selected
        group_obj.save()
        selected.watched = True
        selected.save()
        return Response(
            status=status.HTTP_200_OK,
            data={"detail": "'{0}' selected as movie of the week.".format(selected.name)})
class AllUserGroups(APIView):
    """ return all groups of authenticated user. """
    permission_classes = (permissions.IsAuthenticated, )

    def get(self, request, format=None):
        """Return the serialized list of every group the requesting user belongs to (200)."""
        memberships = request.user.profile.group.all()
        serialized = GroupSerializer(instance=memberships, many=True)
        return Response(status=status.HTTP_200_OK, data=serialized.data)
class AllGroupMembersProfile(APIView):
    """ return all group memebers of the group. group key should pass in url and available only for group members. """
    permission_classes = (permissions.IsAuthenticated, )

    def get(self, request, group_key, format=None):
        """Return serialized profiles of every member of the given group.

        Responses: 401 when the caller is not a member, 404 when
        ``group_key`` matches no group, 200 with the member profiles.
        """
        if not have_permission_for_group(group_key, request.user):
            return Response(
                status=status.HTTP_401_UNAUTHORIZED,
                data={"detail": "you dont have permission for this group."})
        group = get_object_or_404(Group, key=group_key)
        members = Profile.objects.filter(group=group)
        serialized = GroupMemberSerializer(instance=members, many=True)
        return Response(status=status.HTTP_200_OK, data=serialized.data)
class GenerateInviteCode(APIView):
    """ admin only and on get should pass group key. and return invite code. """
    permission_classes = (permissions.IsAuthenticated, )

    def get(self, request, group_key, format=None):
        """Regenerate and return the invite code of the given group.

        Admin only.  Responses: 403 non-admin caller, 404 unknown group key,
        200 with the freshly generated code under the "code" key.
        """
        if not is_admin_user(group_key, request.user):
            return Response(
                status=status.HTTP_403_FORBIDDEN,
                data={"detail": "you dont have permission to perform this action."})
        group = get_object_or_404(Group, key=group_key)
        # Every call rotates the code, invalidating the previous one.
        group.invite_code = invite_code()
        group.save()
        return Response(status=status.HTTP_200_OK, data={"code": group.invite_code})
class JoinGroup(APIView):
    """ available for all authenticated users. invitation code should pass in url. """
    permission_classes = (permissions.IsAuthenticated, )

    def get(self, request, invite_code, format=None):
        """Add the requesting user's profile to the group matching ``invite_code``.

        Responses: 400 when no group carries the code, 200 with a welcome
        message once the membership is recorded.
        """
        code_matches = Group.objects.filter(invite_code=invite_code)
        if not code_matches.exists():
            return Response(
                status=status.HTTP_400_BAD_REQUEST,
                data={"detail": "requested group does not exist, please inform the admin to generate a new key."})
        group = get_object_or_404(Group, invite_code=invite_code)
        request.user.profile.group.add(group)
        return Response(
            status=status.HTTP_200_OK,
            data={"detail": "you are now a member of group '{0}'.".format(group.name)})
class LeaveGroup(APIView):
    """ available for all authenticated users. group_key should pass in url. """
    permission_classes = (permissions.IsAuthenticated, )

    def get(self, request, group_key, format=None):
        """Remove the requesting user from the group identified by ``group_key``.

        Responses: 404 unknown group key, 400 when the user is not a member,
        200 with a confirmation message on success.
        """
        user = request.user
        group = get_object_or_404(Group, key=group_key)
        # Fix: the original called ``.filter(group)`` with a positional
        # argument, which raises TypeError (HTTP 500) on every request;
        # filter on the primary key instead.
        if not user.profile.group.filter(pk=group.pk).exists():
            return Response(
                status=status.HTTP_400_BAD_REQUEST,
                data={"detail": "you are not member of this group."})
        user.profile.group.remove(group)
        return Response(
            status=status.HTTP_200_OK,
            data={"detail": "you are not member of '{}' group anymore.".format(group.name)})
|
{"/contactus/urls.py": ["/contactus/views.py"], "/blog/views.py": ["/blog/models.py", "/api/models.py", "/blog/serializers.py", "/blog/utils.py"], "/api/views_utils.py": ["/api/models.py"], "/customauth/models.py": ["/api/models.py", "/api/utils.py"], "/blog/serializers.py": ["/blog/models.py"], "/contactus/serializers.py": ["/contactus/models.py"], "/contactus/views.py": ["/contactus/models.py", "/contactus/utils.py", "/contactus/serializers.py"], "/api/serializers.py": ["/api/models.py", "/customauth/models.py"], "/customauth/serializers.py": ["/customauth/models.py"], "/customauth/urls.py": ["/customauth/views.py"], "/api/views.py": ["/api/serializers.py", "/api/models.py", "/api/views_utils.py", "/api/utils.py", "/customauth/models.py", "/config/settings.py"], "/blog/admin.py": ["/blog/models.py"], "/api/urls.py": ["/api/views.py"], "/contactus/models.py": ["/contactus/utils.py"], "/api/models.py": ["/api/utils.py"], "/api/admin.py": ["/api/models.py"], "/blog/models.py": ["/api/utils.py", "/blog/utils.py"], "/customauth/views.py": ["/customauth/models.py", "/customauth/serializers.py"], "/blog/urls.py": ["/blog/views.py"], "/customauth/admin.py": ["/customauth/models.py"]}
|
10,966
|
F4R4N/film-review
|
refs/heads/main
|
/contactus/settings_loader.py
|
from rest_framework.exceptions import ValidationError

# Fallback configuration used when the host project does not override a key
# in settings.CONTACT_US_SETTINGS.
DEFAULT_CONTACT_US_SETTINGS = {
    'APP_NAME': None,
    'SEND_MAIL': False,
    'MAIL_SUBJECT': " Contact Us ",
    'MESSAGE': "\nwe got your email. we will respond as soon as possible.\n\nBest Regards, "
}
try:
    from django.conf import settings
except ImportError:
    print("add CONTACT_US_SETTINGS to projects settings.py")
    exit()
# Fix: accessing settings.CONTACT_US_SETTINGS when the project never defined
# it raised AttributeError instead of the intended friendly message above.
if not hasattr(settings, "CONTACT_US_SETTINGS"):
    print("add CONTACT_US_SETTINGS to projects settings.py")
    exit()
# Overlay any keys the project defines on top of the defaults.
fields = ["APP_NAME", "SEND_MAIL", "MAIL_SUBJECT", "MESSAGE"]
for field in fields:
    if field in settings.CONTACT_US_SETTINGS:
        DEFAULT_CONTACT_US_SETTINGS[field] = settings.CONTACT_US_SETTINGS[field]
# Fix: the original re-imported django.conf.settings here and raised inside an
# ``except ImportError`` branch that could never run (the import above already
# succeeded), so the e-mail backend was never actually validated.  Check the
# required settings explicitly, but only when mail sending is enabled.
if DEFAULT_CONTACT_US_SETTINGS["SEND_MAIL"]:
    required = ["EMAIL_USE_TLS", "EMAIL_HOST", "EMAIL_PORT", "EMAIL_HOST_USER",
                "EMAIL_HOST_PASSWORD"]
    missing = [name for name in required if not hasattr(settings, name)]
    if missing:
        raise ValidationError(
            {"detail": "please add email backend in projects settings.py",
             "data": missing})
|
{"/contactus/urls.py": ["/contactus/views.py"], "/blog/views.py": ["/blog/models.py", "/api/models.py", "/blog/serializers.py", "/blog/utils.py"], "/api/views_utils.py": ["/api/models.py"], "/customauth/models.py": ["/api/models.py", "/api/utils.py"], "/blog/serializers.py": ["/blog/models.py"], "/contactus/serializers.py": ["/contactus/models.py"], "/contactus/views.py": ["/contactus/models.py", "/contactus/utils.py", "/contactus/serializers.py"], "/api/serializers.py": ["/api/models.py", "/customauth/models.py"], "/customauth/serializers.py": ["/customauth/models.py"], "/customauth/urls.py": ["/customauth/views.py"], "/api/views.py": ["/api/serializers.py", "/api/models.py", "/api/views_utils.py", "/api/utils.py", "/customauth/models.py", "/config/settings.py"], "/blog/admin.py": ["/blog/models.py"], "/api/urls.py": ["/api/views.py"], "/contactus/models.py": ["/contactus/utils.py"], "/api/models.py": ["/api/utils.py"], "/api/admin.py": ["/api/models.py"], "/blog/models.py": ["/api/utils.py", "/blog/utils.py"], "/customauth/views.py": ["/customauth/models.py", "/customauth/serializers.py"], "/blog/urls.py": ["/blog/views.py"], "/customauth/admin.py": ["/customauth/models.py"]}
|
10,967
|
F4R4N/film-review
|
refs/heads/main
|
/blog/admin.py
|
from django.contrib import admin
from .models import Post, Tag, Comment
@admin.register(Post)
class PostAdmin(admin.ModelAdmin):
    """Admin configuration for blog posts."""
    # Columns on the change list; "visibility" and "visits" are editable
    # directly from the list view.
    list_display = ("title", "author", "key", "visibility", "created", "visits")
    list_editable = ("visibility", "visits")
@admin.register(Tag)
class TagAdmin(admin.ModelAdmin):
    """Admin configuration for post tags."""
    list_display = ("name", "slug")
    # The slug field is auto-filled from "name" while typing in the admin form.
    prepopulated_fields = {"slug": ("name", )}
@admin.register(Comment)
class CommentAdmin(admin.ModelAdmin):
    """Admin configuration for post comments."""
    # "is_active" is toggled straight from the list view (comment moderation).
    list_display = ("post", "author", "key", "body", "is_active")
    list_editable = ("is_active", )
|
{"/contactus/urls.py": ["/contactus/views.py"], "/blog/views.py": ["/blog/models.py", "/api/models.py", "/blog/serializers.py", "/blog/utils.py"], "/api/views_utils.py": ["/api/models.py"], "/customauth/models.py": ["/api/models.py", "/api/utils.py"], "/blog/serializers.py": ["/blog/models.py"], "/contactus/serializers.py": ["/contactus/models.py"], "/contactus/views.py": ["/contactus/models.py", "/contactus/utils.py", "/contactus/serializers.py"], "/api/serializers.py": ["/api/models.py", "/customauth/models.py"], "/customauth/serializers.py": ["/customauth/models.py"], "/customauth/urls.py": ["/customauth/views.py"], "/api/views.py": ["/api/serializers.py", "/api/models.py", "/api/views_utils.py", "/api/utils.py", "/customauth/models.py", "/config/settings.py"], "/blog/admin.py": ["/blog/models.py"], "/api/urls.py": ["/api/views.py"], "/contactus/models.py": ["/contactus/utils.py"], "/api/models.py": ["/api/utils.py"], "/api/admin.py": ["/api/models.py"], "/blog/models.py": ["/api/utils.py", "/blog/utils.py"], "/customauth/views.py": ["/customauth/models.py", "/customauth/serializers.py"], "/blog/urls.py": ["/blog/views.py"], "/customauth/admin.py": ["/customauth/models.py"]}
|
10,968
|
F4R4N/film-review
|
refs/heads/main
|
/api/urls.py
|
from .views import (
CreateGroupView, EditAndDeleteGroupView, EditAndDeleteMovieView,
CreateAndGetMovieView, GetRandomMovieView, SubmitMovieView,
AllUserGroups, AllGroupMembersProfile, GenerateInviteCode, JoinGroup,
LeaveGroup
)
from django.urls import path
app_name = "api"
# NOTE: order matters -- literal routes such as "group/add/" must be listed
# before the parameterized "group/<str:group_key>/" pattern, otherwise the
# catch-all would swallow them.
urlpatterns = [
    path("group/add/", CreateGroupView.as_view()),
    path("group/<str:group_key>/", EditAndDeleteGroupView.as_view()),
    path("admin/group/invite_code/<str:group_key>/", GenerateInviteCode.as_view()),
    path("group/join/<str:invite_code>/", JoinGroup.as_view()),
    path("group/leave/<str:group_key>/", LeaveGroup.as_view()),
    path("group/movie/select/<str:key>/", GetRandomMovieView.as_view()),
    path("group/movie/submit/<str:group>/<str:movie>/", SubmitMovieView.as_view()),
    path("group/all_profiles/<str:group_key>/", AllGroupMembersProfile.as_view()),
    path("user/groups/", AllUserGroups.as_view()),
    path("movie/", CreateAndGetMovieView.as_view()),
    path("movie/<str:key>/", EditAndDeleteMovieView.as_view()),
]
|
{"/contactus/urls.py": ["/contactus/views.py"], "/blog/views.py": ["/blog/models.py", "/api/models.py", "/blog/serializers.py", "/blog/utils.py"], "/api/views_utils.py": ["/api/models.py"], "/customauth/models.py": ["/api/models.py", "/api/utils.py"], "/blog/serializers.py": ["/blog/models.py"], "/contactus/serializers.py": ["/contactus/models.py"], "/contactus/views.py": ["/contactus/models.py", "/contactus/utils.py", "/contactus/serializers.py"], "/api/serializers.py": ["/api/models.py", "/customauth/models.py"], "/customauth/serializers.py": ["/customauth/models.py"], "/customauth/urls.py": ["/customauth/views.py"], "/api/views.py": ["/api/serializers.py", "/api/models.py", "/api/views_utils.py", "/api/utils.py", "/customauth/models.py", "/config/settings.py"], "/blog/admin.py": ["/blog/models.py"], "/api/urls.py": ["/api/views.py"], "/contactus/models.py": ["/contactus/utils.py"], "/api/models.py": ["/api/utils.py"], "/api/admin.py": ["/api/models.py"], "/blog/models.py": ["/api/utils.py", "/blog/utils.py"], "/customauth/views.py": ["/customauth/models.py", "/customauth/serializers.py"], "/blog/urls.py": ["/blog/views.py"], "/customauth/admin.py": ["/customauth/models.py"]}
|
10,969
|
F4R4N/film-review
|
refs/heads/main
|
/contactus/models.py
|
from django.db import models
from .utils import random_key
class ReadedManager(models.Manager):
    """Manager that narrows the queryset to messages already marked as read."""
    def get_queryset(self):
        # Only rows with is_readed=True are visible through this manager.
        return super(ReadedManager, self).get_queryset().filter(is_readed=True)
class Contact(models.Model):
    """A single message submitted through the contact-us form."""
    # Random short identifier used to reference the message externally.
    key = models.CharField(default=random_key, unique=True, max_length=13)
    name = models.CharField(max_length=30)
    email = models.EmailField()
    subject = models.CharField(max_length=30, blank=True, null=True)
    text = models.TextField()
    phone_number = models.CharField(max_length=13, blank=True, null=True)
    address = models.CharField(max_length=40, blank=True, null=True)
    # NOTE(review): auto_now=True refreshes this on every save; if this is
    # meant to be the submission time, auto_now_add=True is likely intended
    # -- confirm before changing (requires a migration).
    datetime = models.DateTimeField(auto_now=True)
    ip = models.GenericIPAddressField(blank=True, null=True)
    is_readed = models.BooleanField(default=False)
    objects = models.Manager()  # default manager: all messages
    readed = ReadedManager()  # only messages with is_readed=True
    def __str__(self):
        return self.name
class MassEmail(models.Model):
    """A bulk e-mail composed by an admin (subject plus body text)."""
    # Random short identifier used to reference the e-mail externally.
    key = models.CharField(default=random_key, max_length=13, unique=True)
    # auto_now=True: refreshed on every save.
    datetime = models.DateTimeField(auto_now=True)
    admin_name = models.CharField(max_length=30, blank=True, null=True)
    subject = models.CharField(max_length=50)
    text = models.TextField()
    def __str__(self):
        # Fix: the model has no ``name`` field, so the original
        # ``return self.name`` raised AttributeError; use the subject.
        return self.subject
|
{"/contactus/urls.py": ["/contactus/views.py"], "/blog/views.py": ["/blog/models.py", "/api/models.py", "/blog/serializers.py", "/blog/utils.py"], "/api/views_utils.py": ["/api/models.py"], "/customauth/models.py": ["/api/models.py", "/api/utils.py"], "/blog/serializers.py": ["/blog/models.py"], "/contactus/serializers.py": ["/contactus/models.py"], "/contactus/views.py": ["/contactus/models.py", "/contactus/utils.py", "/contactus/serializers.py"], "/api/serializers.py": ["/api/models.py", "/customauth/models.py"], "/customauth/serializers.py": ["/customauth/models.py"], "/customauth/urls.py": ["/customauth/views.py"], "/api/views.py": ["/api/serializers.py", "/api/models.py", "/api/views_utils.py", "/api/utils.py", "/customauth/models.py", "/config/settings.py"], "/blog/admin.py": ["/blog/models.py"], "/api/urls.py": ["/api/views.py"], "/contactus/models.py": ["/contactus/utils.py"], "/api/models.py": ["/api/utils.py"], "/api/admin.py": ["/api/models.py"], "/blog/models.py": ["/api/utils.py", "/blog/utils.py"], "/customauth/views.py": ["/customauth/models.py", "/customauth/serializers.py"], "/blog/urls.py": ["/blog/views.py"], "/customauth/admin.py": ["/customauth/models.py"]}
|
10,970
|
F4R4N/film-review
|
refs/heads/main
|
/api/models.py
|
from django.db import models
from .utils import random_key, invite_code, group_image
from django.contrib.auth.models import User
from django.core.validators import MinValueValidator, MaxValueValidator
import datetime
class Movie(models.Model):
    """A movie added by a user, optionally rated, reviewed and linked."""
    # Random short identifier used in urls instead of the numeric pk.
    key = models.CharField(max_length=15, default=random_key, unique=True)
    name = models.CharField(max_length=100)
    description = models.TextField(null=True, blank=True)
    review = models.TextField(null=True, blank=True)
    # Owner; reverse accessor is ``user.movie``.
    user = models.ForeignKey(User, on_delete=models.CASCADE, related_name="movie")
    # NOTE(review): MaxValueValidator(datetime.datetime.now().year) is
    # evaluated once at import time, so the upper bound stays frozen until
    # the process restarts -- confirm whether a callable bound is wanted.
    year = models.PositiveIntegerField(validators=[
        MinValueValidator(1800),
        MaxValueValidator(datetime.datetime.now().year)],
        null=True, blank=True)
    imdb_rate = models.FloatField(validators=[
        MinValueValidator(1),
        MaxValueValidator(10)],
        null=True, blank=True)
    watched = models.BooleanField(default=False)
    download_link = models.URLField(null=True, blank=True)
    poster_link = models.URLField(null=True, blank=True)
    # auto_now=True: refreshed on every save.
    date_and_time = models.DateTimeField(auto_now=True)
    def __str__(self):
        return self.name
class Group(models.Model):
    """A movie-watching group with one admin and a weekly movie pick."""
    # NOTE(review): unlike Movie.key this field is NOT unique, yet views look
    # groups up with get_object_or_404(Group, key=...) as if it were --
    # duplicates would cause MultipleObjectsReturned; confirm and consider
    # unique=True (requires a migration).
    key = models.CharField(max_length=15, default=random_key)
    name = models.CharField(max_length=50, unique=True)
    meeting_detail = models.TextField(null=True, blank=True)
    image = models.ImageField(
        upload_to=group_image,
        default="group/default/default.png")
    # The currently selected weekly movie, set by SubmitMovieView.
    movie_of_the_week = models.ForeignKey(
        Movie, on_delete=models.CASCADE, null=True, blank=True)
    admin = models.ForeignKey(User, on_delete=models.DO_NOTHING)
    # Rotated by GenerateInviteCode; used by JoinGroup to add members.
    invite_code = models.CharField(max_length=28, default=invite_code)
    # auto_now=True: refreshed on every save.
    date_and_time = models.DateTimeField(auto_now=True)
    def __str__(self):
        return self.name
|
{"/contactus/urls.py": ["/contactus/views.py"], "/blog/views.py": ["/blog/models.py", "/api/models.py", "/blog/serializers.py", "/blog/utils.py"], "/api/views_utils.py": ["/api/models.py"], "/customauth/models.py": ["/api/models.py", "/api/utils.py"], "/blog/serializers.py": ["/blog/models.py"], "/contactus/serializers.py": ["/contactus/models.py"], "/contactus/views.py": ["/contactus/models.py", "/contactus/utils.py", "/contactus/serializers.py"], "/api/serializers.py": ["/api/models.py", "/customauth/models.py"], "/customauth/serializers.py": ["/customauth/models.py"], "/customauth/urls.py": ["/customauth/views.py"], "/api/views.py": ["/api/serializers.py", "/api/models.py", "/api/views_utils.py", "/api/utils.py", "/customauth/models.py", "/config/settings.py"], "/blog/admin.py": ["/blog/models.py"], "/api/urls.py": ["/api/views.py"], "/contactus/models.py": ["/contactus/utils.py"], "/api/models.py": ["/api/utils.py"], "/api/admin.py": ["/api/models.py"], "/blog/models.py": ["/api/utils.py", "/blog/utils.py"], "/customauth/views.py": ["/customauth/models.py", "/customauth/serializers.py"], "/blog/urls.py": ["/blog/views.py"], "/customauth/admin.py": ["/customauth/models.py"]}
|
10,971
|
F4R4N/film-review
|
refs/heads/main
|
/api/admin.py
|
from django.contrib import admin
from .models import Group, Movie
@admin.register(Group)
class GroupAdmin(admin.ModelAdmin):
    """Admin configuration for groups."""
    list_display = (
        "name", "key", "meeting_detail", "movie_of_the_week", "admin", "invite_code")
    # Meeting details can be edited straight from the list view.
    list_editable = ("meeting_detail", )
    search_fields = ("name", "meeting_detail")
@admin.register(Movie)
class MovieAdmin(admin.ModelAdmin):
    """Admin configuration for movies."""
    list_display = (
        "name", "key", "description", "review", "user", "year", "imdb_rate",
        "watched", "download_link", "poster_link")
    # These attributes are editable straight from the list view.
    list_editable = (
        "description", "review", "year", "imdb_rate", "watched")
    search_fields = ("name", "description", "review")
|
{"/contactus/urls.py": ["/contactus/views.py"], "/blog/views.py": ["/blog/models.py", "/api/models.py", "/blog/serializers.py", "/blog/utils.py"], "/api/views_utils.py": ["/api/models.py"], "/customauth/models.py": ["/api/models.py", "/api/utils.py"], "/blog/serializers.py": ["/blog/models.py"], "/contactus/serializers.py": ["/contactus/models.py"], "/contactus/views.py": ["/contactus/models.py", "/contactus/utils.py", "/contactus/serializers.py"], "/api/serializers.py": ["/api/models.py", "/customauth/models.py"], "/customauth/serializers.py": ["/customauth/models.py"], "/customauth/urls.py": ["/customauth/views.py"], "/api/views.py": ["/api/serializers.py", "/api/models.py", "/api/views_utils.py", "/api/utils.py", "/customauth/models.py", "/config/settings.py"], "/blog/admin.py": ["/blog/models.py"], "/api/urls.py": ["/api/views.py"], "/contactus/models.py": ["/contactus/utils.py"], "/api/models.py": ["/api/utils.py"], "/api/admin.py": ["/api/models.py"], "/blog/models.py": ["/api/utils.py", "/blog/utils.py"], "/customauth/views.py": ["/customauth/models.py", "/customauth/serializers.py"], "/blog/urls.py": ["/blog/views.py"], "/customauth/admin.py": ["/customauth/models.py"]}
|
10,972
|
F4R4N/film-review
|
refs/heads/main
|
/blog/models.py
|
from django.db import models
from api.utils import random_key
from django.contrib.auth.models import User
from .utils import post_images
from django.utils.text import slugify
class Tag(models.Model):
    # A label attached to posts; ``slug`` is always derived from ``name``.
    name = models.CharField(max_length=50, unique=True)
    slug = models.SlugField(unique=True)
    def __str__(self):
        return self.name
    def save(self, *args, **kwargs):
        """
        Auto-populate ``slug`` from ``name`` on every save.
        """
        self.slug = slugify(self.name)
        super(Tag, self).save(*args, **kwargs)
class Post(models.Model):
    # A blog entry; ``visibility`` controls the audience (draft/group/all).
    VISIBILITY_CHOICES = (
        ('draft', 'Draft'),
        ('group', 'Group'),
        ('all', 'All'),
    )
    # Random, non-sequential public identifier used instead of the pk.
    key = models.CharField(default=random_key, max_length=17, unique=True)
    title = models.CharField(max_length=200)
    body = models.TextField()
    author = models.ForeignKey(User, on_delete=models.CASCADE)
    tags = models.ManyToManyField(Tag, blank=True)
    image = models.ImageField(upload_to=post_images, blank=True)
    visibility = models.CharField(
        max_length=6, choices=VISIBILITY_CHOICES, default='draft')
    created = models.DateTimeField(auto_now_add=True)
    updated = models.DateTimeField(auto_now=True)
    # Simple page-view counter.
    visits = models.PositiveIntegerField(default=0)
    def __str__(self):
        return self.title
class Comment(models.Model):
    # A user comment on a Post; deleting the post cascades to its comments.
    key = models.CharField(default=random_key, max_length=17, unique=True)
    post = models.ForeignKey(
        Post, on_delete=models.CASCADE, related_name='comments')
    author = models.ForeignKey(User, on_delete=models.CASCADE)
    body = models.TextField()
    # Soft visibility flag; inactive comments are presumably hidden from
    # listings — confirm against the view layer.
    is_active = models.BooleanField(default=True)
    created = models.DateTimeField(auto_now_add=True)
    def __str__(self):
        return self.post.title
|
{"/contactus/urls.py": ["/contactus/views.py"], "/blog/views.py": ["/blog/models.py", "/api/models.py", "/blog/serializers.py", "/blog/utils.py"], "/api/views_utils.py": ["/api/models.py"], "/customauth/models.py": ["/api/models.py", "/api/utils.py"], "/blog/serializers.py": ["/blog/models.py"], "/contactus/serializers.py": ["/contactus/models.py"], "/contactus/views.py": ["/contactus/models.py", "/contactus/utils.py", "/contactus/serializers.py"], "/api/serializers.py": ["/api/models.py", "/customauth/models.py"], "/customauth/serializers.py": ["/customauth/models.py"], "/customauth/urls.py": ["/customauth/views.py"], "/api/views.py": ["/api/serializers.py", "/api/models.py", "/api/views_utils.py", "/api/utils.py", "/customauth/models.py", "/config/settings.py"], "/blog/admin.py": ["/blog/models.py"], "/api/urls.py": ["/api/views.py"], "/contactus/models.py": ["/contactus/utils.py"], "/api/models.py": ["/api/utils.py"], "/api/admin.py": ["/api/models.py"], "/blog/models.py": ["/api/utils.py", "/blog/utils.py"], "/customauth/views.py": ["/customauth/models.py", "/customauth/serializers.py"], "/blog/urls.py": ["/blog/views.py"], "/customauth/admin.py": ["/customauth/models.py"]}
|
10,973
|
F4R4N/film-review
|
refs/heads/main
|
/config/settings.py
|
from pathlib import Path
from datetime import timedelta
import os
from .config import CONFIG_SECRET_KEY, CONFIG_DEBUG, EMAIL, SMTP, PASSWORD
# Project root: two levels above this settings module.
BASE_DIR = Path(__file__).resolve().parent.parent
# Secret key and debug flag are sourced from the untracked config module.
SECRET_KEY = CONFIG_SECRET_KEY
DEBUG = CONFIG_DEBUG
# NOTE(review): "*" accepts any Host header; narrow this for production.
ALLOWED_HOSTS = ["*"]
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'api.apps.ApiConfig',
    'customauth.apps.CustomauthConfig',
    'contactus.apps.ContactusConfig',
    'rest_framework',
    'corsheaders',
    'drf_yasg',
    'blog.apps.BlogConfig',
    'rest_framework.authtoken',
    'rest_framework_simplejwt.token_blacklist',
]
MIDDLEWARE = [
    # CorsMiddleware must precede middleware that can produce responses.
    'corsheaders.middleware.CorsMiddleware',
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'config.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'config.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': BASE_DIR / 'db.sqlite3',
    }
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media/')
# rest framework configs
REST_FRAMEWORK = {
    'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.LimitOffsetPagination',
    'PAGE_SIZE': 3,
    'TEST_REQUEST_DEFAULT_FORMAT': 'json',
    # 'DEFAULT_RENDERER_CLASSES': [
    #     'rest_framework.renderers.JSONRenderer',
    # ],
    # 'DEFAULT_PARSER_CLASSES': [
    #     'rest_framework.parsers.JSONParser',
    # ],
    # All endpoints require authentication unless a view overrides it.
    'DEFAULT_PERMISSION_CLASSES': [
        'rest_framework.permissions.IsAuthenticated',
    ],
    'DEFAULT_AUTHENTICATION_CLASSES': [
        'rest_framework_simplejwt.authentication.JWTAuthentication',
    ],
}
# django-cors-headers configs
CORS_ALLOW_ALL_ORIGINS = True
# simple jwt configs
SIMPLE_JWT = {
    'ACCESS_TOKEN_LIFETIME': timedelta(days=3),
    'REFRESH_TOKEN_LIFETIME': timedelta(days=7),
    # Rotated refresh tokens are blacklisted so old ones can't be replayed.
    'ROTATE_REFRESH_TOKENS': True,
    'BLACKLIST_AFTER_ROTATION': True,
}
# swagger (drf_yasg) configs
SWAGGER_SETTINGS = {
    'USE_SESSION_AUTH': False,
    'SECURITY_DEFINITIONS': {
        'Bearer': {
            'type': 'apiKey',
            'name': 'Authorization',
            'in': 'header'
        }
    }
}
# email configs — SMTP credentials come from the untracked config module.
EMAIL_USE_TLS = True
EMAIL_HOST = SMTP
EMAIL_PORT = 587
EMAIL_HOST_USER = EMAIL
EMAIL_HOST_PASSWORD = PASSWORD
# HTTPS hardening only outside of local development.
if not DEBUG:
    SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
    SECURE_SSL_REDIRECT = True
    SESSION_COOKIE_SECURE = True
# Per-user cap on movies (enforced in the api views).
MOVIE_PER_USER = 10
CONTACT_US_SETTINGS = {
    "APP_NAME": "Film Review",
    "SEND_MAIL": True
}
|
{"/contactus/urls.py": ["/contactus/views.py"], "/blog/views.py": ["/blog/models.py", "/api/models.py", "/blog/serializers.py", "/blog/utils.py"], "/api/views_utils.py": ["/api/models.py"], "/customauth/models.py": ["/api/models.py", "/api/utils.py"], "/blog/serializers.py": ["/blog/models.py"], "/contactus/serializers.py": ["/contactus/models.py"], "/contactus/views.py": ["/contactus/models.py", "/contactus/utils.py", "/contactus/serializers.py"], "/api/serializers.py": ["/api/models.py", "/customauth/models.py"], "/customauth/serializers.py": ["/customauth/models.py"], "/customauth/urls.py": ["/customauth/views.py"], "/api/views.py": ["/api/serializers.py", "/api/models.py", "/api/views_utils.py", "/api/utils.py", "/customauth/models.py", "/config/settings.py"], "/blog/admin.py": ["/blog/models.py"], "/api/urls.py": ["/api/views.py"], "/contactus/models.py": ["/contactus/utils.py"], "/api/models.py": ["/api/utils.py"], "/api/admin.py": ["/api/models.py"], "/blog/models.py": ["/api/utils.py", "/blog/utils.py"], "/customauth/views.py": ["/customauth/models.py", "/customauth/serializers.py"], "/blog/urls.py": ["/blog/views.py"], "/customauth/admin.py": ["/customauth/models.py"]}
|
10,974
|
F4R4N/film-review
|
refs/heads/main
|
/customauth/views.py
|
from django.contrib.auth.models import User
from django.shortcuts import get_object_or_404
from django.contrib.auth.password_validation import validate_password
from django.core.exceptions import ValidationError
from django.core.mail import EmailMessage
from django.core.validators import validate_email
from .models import Profile
from .serializers import (
RegisterSerializer, UserLoginSerializer, UserProfileSerializer)
from rest_framework import generics, status
from rest_framework.views import APIView
from rest_framework.parsers import MultiPartParser
from rest_framework.permissions import AllowAny, IsAuthenticated
from rest_framework.response import Response
from rest_framework_simplejwt.views import TokenViewBase
from rest_framework_simplejwt.tokens import RefreshToken
import time
import random
class RegisterView(generics.CreateAPIView):
    # POST-only endpoint that creates a new User; open to anonymous clients.
    queryset = User.objects.all()
    permission_classes = (AllowAny,)
    serializer_class = RegisterSerializer
class GetUserProfile(APIView):
    """Return the authenticated user's own profile data."""
    permission_classes = (IsAuthenticated, )

    def get(self, request, format=None):
        # Serialize the requesting user directly; no database lookup needed.
        payload = UserProfileSerializer(instance=request.user).data
        return Response(status=status.HTTP_200_OK, data=payload)
class ChangePasswordView(APIView):
    """Change the password of the account identified by ``key``.

    Expects ``old_password``, ``password1`` and ``password2`` in the
    request body; only the profile owner may call it.
    """
    permission_classes = (IsAuthenticated,)
    def put(self, request, key, format=None):
        user = request.user
        if user.profile.key != key:
            return Response(
                status=status.HTTP_401_UNAUTHORIZED,
                data={"authorize": "You dont have permission for this user !"})
        # Robustness fix: a missing body field previously raised an
        # unhandled KeyError (HTTP 500); reject with a 400 instead.
        for field in ('old_password', 'password1', 'password2'):
            if field not in request.data:
                return Response(
                    status=status.HTTP_400_BAD_REQUEST,
                    data={field: "this field is required !"})
        if request.data['password1'] != request.data['password2']:
            return Response(
                status=status.HTTP_400_BAD_REQUEST,
                data={'password1': "password fields dont match !"})
        if not user.check_password(request.data['old_password']):
            return Response(
                status=status.HTTP_403_FORBIDDEN,
                data={"old_password": "old password is not correct !"})
        try:
            # Run Django's configured password validators.
            validate_password(request.data['password1'], user=user)
        except ValidationError as ex:
            return Response(
                status=status.HTTP_400_BAD_REQUEST,
                data={"detail": {"password": ex}})
        instance = User.objects.get(profile__key=key)
        instance.set_password(request.data['password1'])
        instance.save()
        return Response(
            status=status.HTTP_200_OK, data={"detail": "password changed"})
class UpdateProfileView(generics.UpdateAPIView):
    # Partial update of basic account fields; only the profile owner may edit.
    permission_classes = (IsAuthenticated,)
    serializer_class = UserProfileSerializer
    def put(self, request, key, format=None):
        user = request.user
        if user.profile.key != key:
            return Response(
                status=status.HTTP_401_UNAUTHORIZED,
                data={"authorize": "you dont have permission for this user !"})
        # An empty body means nothing to change.
        if len(request.data) == 0:
            return Response(
                status=status.HTTP_400_BAD_REQUEST,
                data={"detail": "no field modified"})
        instance = User.objects.get(profile__key=key)
        if 'first_name' in request.data:
            instance.first_name = request.data['first_name']
        if 'last_name' in request.data:
            instance.last_name = request.data['last_name']
        if 'email' in request.data:
            # Uniqueness check excludes the caller so resubmitting the
            # current address is not rejected.
            if User.objects.exclude(profile__key=user.profile.key).filter(email=request.data['email']).exists():
                return Response(
                    status=status.HTTP_406_NOT_ACCEPTABLE,
                    data={"email": "this email is already in use !"})
            try:
                validate_email(request.data['email'])
            except ValidationError as ex:
                return Response(
                    status=status.HTTP_400_BAD_REQUEST,
                    data={"detail": {"email": ex}})
            instance.email = request.data['email']
        if 'username' in request.data:
            # Same owner-excluding uniqueness check for the username.
            if User.objects.exclude(profile__key=user.profile.key).filter(username=request.data['username']).exists():
                return Response(
                    status=status.HTTP_406_NOT_ACCEPTABLE,
                    data={"username": "this username is not available !"})
            instance.username = request.data['username']
        instance.save()
        return Response(status=status.HTTP_200_OK, data={"detail": "updated"})
class UserLoginView(TokenViewBase):
    # JWT login endpoint; UserLoginSerializer issues the token pair.
    serializer_class = UserLoginSerializer
    permission_classes = (AllowAny, )
class UpdateUserImageView(generics.UpdateAPIView):
    """Replace the profile image of the account identified by ``key``."""
    parser_classes = (MultiPartParser, )
    permission_classes = (IsAuthenticated,)

    def put(self, request, key, format=None):
        # Guard clauses: reject non-owners, then requests without an image.
        if request.user.profile.key != key:
            return Response(status=status.HTTP_401_UNAUTHORIZED, data={
                'detail': {
                    "authorize": "you dont have permission for this user !"
                }
            })
        if 'image' not in request.data:
            return Response(
                status=status.HTTP_404_NOT_FOUND,
                data={'detail': {'required': 'no new image provided.'}})
        # Store the uploaded file on the owner's profile.
        profile = get_object_or_404(Profile, key=key)
        profile.image = request.data['image']
        profile.user = request.user
        profile.save()
        return Response(
            status=status.HTTP_200_OK, data={"detail": 'modified'})
class LogoutView(APIView):
    # Logs the user out by blacklisting the supplied refresh token.
    permission_classes = (IsAuthenticated,)
    def post(self, request):
        try:
            refresh_token = request.data["refresh_token"]
            token = RefreshToken(refresh_token)
            token.blacklist()
            return Response(
                status=status.HTTP_205_RESET_CONTENT,
                data={'detail': "logged out"})
        except Exception:
            # Deliberate catch-all: a missing field, malformed token, or
            # already-blacklisted token are all reported the same way.
            return Response(
                status=status.HTTP_400_BAD_REQUEST,
                data={"detail": "refresh_token is not valid"})
class DeleteProfileView(APIView):
    """Deactivate (soft-delete) the account identified by ``key``."""
    permission_classes = (IsAuthenticated,)

    def delete(self, request, key, format=None):
        account = request.user
        # Only the profile owner may delete the account.
        if account.profile.key != key:
            return Response(
                data={"detail": "unauthorized"},
                status=status.HTTP_401_UNAUTHORIZED)
        # The current password must accompany the request...
        if 'password' not in request.data:
            return Response(
                status=status.HTTP_400_BAD_REQUEST,
                data={'detail': 'password-required'})
        # ...and it must be correct.
        if not account.check_password(request.data['password']):
            return Response(
                status=status.HTTP_403_FORBIDDEN,
                data={'detail': "password-incorrect"})
        # Flip is_active rather than deleting the row outright.
        account.is_active = False
        account.save()
        return Response(status=status.HTTP_200_OK, data={"detail": "deleted"})
class ForgotPasswordView(APIView):
    """Start the password-reset flow: e-mail a confirmation code.

    Responds with the same "sent" payload whether or not the address
    exists, so the endpoint cannot be used to enumerate accounts.
    """
    permission_classes = (AllowAny,)
    def post(self, request, format=None):
        if 'email' not in request.data:
            return Response(
                status=status.HTTP_400_BAD_REQUEST,
                data={'detail': {'email': 'required'}})
        try:
            validate_email(request.data['email'])
        except ValidationError:
            return Response(
                status=status.HTTP_400_BAD_REQUEST,
                # Bug fix: this keyword was corrupted to "یata", a syntax error.
                data={"detail": {"email": "enter a valid email address!"}})
        try:
            user = User.objects.get(email=request.data["email"])
        except User.DoesNotExist:
            # Fake the work (and the success response) for unknown addresses.
            time.sleep(3)
            return Response(status=status.HTTP_200_OK, data={'detail': "sent"})
        mail_subject = 'Reset Your Password'
        # NOTE(review): consider the ``secrets`` module for reset codes;
        # ``random`` is not designed for security-sensitive values.
        server_code = random.randint(10000, 999999)
        name = "user"
        if not user.first_name == "":
            name = user.first_name
        message = 'Hi {0},\nthis is your password reset code:\n{1}'.format(name, server_code)
        to_email = user.email
        EmailMessage(mail_subject, message, to=[to_email]).send()
        # Stash the code and target user for the validate/reset steps.
        request.session['code'] = server_code
        request.session['user'] = user.username
        return Response(status=status.HTTP_200_OK, data={'detail': "sent"})
class ValidateConfirmationCodeView(APIView):
    """Check a password-reset confirmation code against the session."""
    permission_classes = (AllowAny,)
    def post(self, request, format=None):
        # Bug fix: this used ``and``, so a session missing only one of the
        # two entries slipped through and crashed on int(None) below.
        if 'code' not in request.session or 'user' not in request.session:
            return Response(
                status=status.HTTP_404_NOT_FOUND,
                data={'detail': 'session not found.'})
        if 'code' not in request.data:
            return Response(
                status=status.HTTP_400_BAD_REQUEST,
                data={'detail': {"code": "required"}})
        try:
            # Non-numeric input previously raised ValueError (HTTP 500);
            # treat it as a wrong code instead.
            matches = int(request.data['code']) == int(request.session.get('code'))
        except (TypeError, ValueError):
            matches = False
        if not matches:
            return Response(
                status=status.HTTP_403_FORBIDDEN,
                data={'detail': 'wrong-code'})
        return Response(
            status=status.HTTP_200_OK,
            data={'key': get_object_or_404(User, username=request.session['user']).profile.key})
class ResetPasswordView(APIView):
    """Set a new password after the confirmation code was validated."""
    permission_classes = (AllowAny,)
    def put(self, request, key, format=None):
        user = get_object_or_404(User, username=request.session.get('user'))
        if user.profile.key != key:
            return Response(
                status=status.HTTP_401_UNAUTHORIZED,
                data={"detail": "unauthorized"})
        # Bug fix: the original condition ``not 'password' and 'again' in
        # request.data`` is always False ('password' is a truthy literal),
        # so missing fields crashed below with KeyError instead of a 400.
        if 'password' not in request.data or 'again' not in request.data:
            return Response(
                status=status.HTTP_400_BAD_REQUEST,
                data={'detail': {"password": "required", 'again': 'required'}})
        if request.data["password"] != request.data['again']:
            return Response(
                status=status.HTTP_400_BAD_REQUEST,
                data={'detail': {"password": 'not-matched'}})
        try:
            # Run Django's configured password validators.
            validate_password(request.data['password'], user=user)
        except ValidationError as ex:
            return Response(
                status=status.HTTP_400_BAD_REQUEST,
                data={"detail": {"password": ex}})
        user.set_password(request.data['password'])
        user.save()
        # Invalidate the reset session so the code cannot be replayed.
        request.session.flush()
        return Response(status=status.HTTP_200_OK, data={'detail': 'done'})
|
{"/contactus/urls.py": ["/contactus/views.py"], "/blog/views.py": ["/blog/models.py", "/api/models.py", "/blog/serializers.py", "/blog/utils.py"], "/api/views_utils.py": ["/api/models.py"], "/customauth/models.py": ["/api/models.py", "/api/utils.py"], "/blog/serializers.py": ["/blog/models.py"], "/contactus/serializers.py": ["/contactus/models.py"], "/contactus/views.py": ["/contactus/models.py", "/contactus/utils.py", "/contactus/serializers.py"], "/api/serializers.py": ["/api/models.py", "/customauth/models.py"], "/customauth/serializers.py": ["/customauth/models.py"], "/customauth/urls.py": ["/customauth/views.py"], "/api/views.py": ["/api/serializers.py", "/api/models.py", "/api/views_utils.py", "/api/utils.py", "/customauth/models.py", "/config/settings.py"], "/blog/admin.py": ["/blog/models.py"], "/api/urls.py": ["/api/views.py"], "/contactus/models.py": ["/contactus/utils.py"], "/api/models.py": ["/api/utils.py"], "/api/admin.py": ["/api/models.py"], "/blog/models.py": ["/api/utils.py", "/blog/utils.py"], "/customauth/views.py": ["/customauth/models.py", "/customauth/serializers.py"], "/blog/urls.py": ["/blog/views.py"], "/customauth/admin.py": ["/customauth/models.py"]}
|
10,975
|
F4R4N/film-review
|
refs/heads/main
|
/blog/urls.py
|
from django.urls import path
from .views import (
CreateAndGetUserPost, EditAndDeletePost, AllPublicPostsPaginated, DesiredPost,
GroupPublicPostsPaginated, CreateComment, EditAndDeleteComment)
app_name = "blog"
urlpatterns = [
    # The caller's own posts: list/create, then edit/delete by key.
    path("post/", CreateAndGetUserPost.as_view()),
    path("post/<str:post_key>/", EditAndDeletePost.as_view()),
    # Public listings; "posts/all/" must stay above "posts/<str:post_key>/"
    # so the literal "all" is not captured as a post key.
    path("posts/all/", AllPublicPostsPaginated.as_view()),
    path("posts/<str:post_key>/", DesiredPost.as_view()),
    path("posts/group/<str:group_key>/", GroupPublicPostsPaginated.as_view()),
    # Comments: create under a post, then edit/delete by comment key.
    path("comment/create/<str:post_key>/", CreateComment.as_view()),
    path("comment/<str:comment_key>/", EditAndDeleteComment.as_view()),
]
|
{"/contactus/urls.py": ["/contactus/views.py"], "/blog/views.py": ["/blog/models.py", "/api/models.py", "/blog/serializers.py", "/blog/utils.py"], "/api/views_utils.py": ["/api/models.py"], "/customauth/models.py": ["/api/models.py", "/api/utils.py"], "/blog/serializers.py": ["/blog/models.py"], "/contactus/serializers.py": ["/contactus/models.py"], "/contactus/views.py": ["/contactus/models.py", "/contactus/utils.py", "/contactus/serializers.py"], "/api/serializers.py": ["/api/models.py", "/customauth/models.py"], "/customauth/serializers.py": ["/customauth/models.py"], "/customauth/urls.py": ["/customauth/views.py"], "/api/views.py": ["/api/serializers.py", "/api/models.py", "/api/views_utils.py", "/api/utils.py", "/customauth/models.py", "/config/settings.py"], "/blog/admin.py": ["/blog/models.py"], "/api/urls.py": ["/api/views.py"], "/contactus/models.py": ["/contactus/utils.py"], "/api/models.py": ["/api/utils.py"], "/api/admin.py": ["/api/models.py"], "/blog/models.py": ["/api/utils.py", "/blog/utils.py"], "/customauth/views.py": ["/customauth/models.py", "/customauth/serializers.py"], "/blog/urls.py": ["/blog/views.py"], "/customauth/admin.py": ["/customauth/models.py"]}
|
10,976
|
F4R4N/film-review
|
refs/heads/main
|
/customauth/admin.py
|
from django.contrib import admin
from .models import Profile
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
@admin.register(Profile)
class ProfileAdmin(admin.ModelAdmin):
    # Changelist columns for Profile, including its public lookup key.
    list_display = ("user", "image", "key", )
# Changes list_display and list_editable while keeping the stock behavior
# by inheriting from Django's UserAdmin.
class MyUserAdmin(UserAdmin):
    list_display = (
        "username", "first_name", "last_name", "email", "is_active", "is_staff")
    list_editable = ("email", "is_active",)
# django.contrib.auth registers User with the default UserAdmin at startup,
# so it must be unregistered before re-registering with MyUserAdmin.
admin.site.unregister(User)
admin.site.register(User, MyUserAdmin)
|
{"/contactus/urls.py": ["/contactus/views.py"], "/blog/views.py": ["/blog/models.py", "/api/models.py", "/blog/serializers.py", "/blog/utils.py"], "/api/views_utils.py": ["/api/models.py"], "/customauth/models.py": ["/api/models.py", "/api/utils.py"], "/blog/serializers.py": ["/blog/models.py"], "/contactus/serializers.py": ["/contactus/models.py"], "/contactus/views.py": ["/contactus/models.py", "/contactus/utils.py", "/contactus/serializers.py"], "/api/serializers.py": ["/api/models.py", "/customauth/models.py"], "/customauth/serializers.py": ["/customauth/models.py"], "/customauth/urls.py": ["/customauth/views.py"], "/api/views.py": ["/api/serializers.py", "/api/models.py", "/api/views_utils.py", "/api/utils.py", "/customauth/models.py", "/config/settings.py"], "/blog/admin.py": ["/blog/models.py"], "/api/urls.py": ["/api/views.py"], "/contactus/models.py": ["/contactus/utils.py"], "/api/models.py": ["/api/utils.py"], "/api/admin.py": ["/api/models.py"], "/blog/models.py": ["/api/utils.py", "/blog/utils.py"], "/customauth/views.py": ["/customauth/models.py", "/customauth/serializers.py"], "/blog/urls.py": ["/blog/views.py"], "/customauth/admin.py": ["/customauth/models.py"]}
|
11,023
|
haw230/dynamic-fibonacci
|
refs/heads/master
|
/tests/test_main.py
|
# -*- coding: utf-8 -*-
import sys
sys.path.insert(0, '/home/ubuntu/workspace/dynamic_fibonacci') #change this to correct folder name
sys.path.insert(0, '/home/ubuntu/workspace/solution')
from main import fib #change to proper function name
from solution import solved_fib #change to proper function name
from time import sleep
from random import sample
class aethetics(object):
    # ANSI escape codes for colored/bold terminal output.
    # NOTE(review): name is presumably a typo of "aesthetics"; kept as-is
    # because the rest of this file references it by this name.
    BLUE = '\033[94m'
    GREEN = '\033[92m'
    WARNING = '\033[93m'
    FAIL = '\033[91m'
    END = '\033[0m'
    BOLD = '\033[1m'
    LINE = '\033[95m———————————————————————————————————————————————————————————————\033[0m'
class TestCases(object):
    """Compare fib() against the reference solved_fib() and report results."""

    def __init__(self):
        # Tallies updated by test() and summarized by end().
        self.passed_tests = 0
        self.total_tests = 0

    def tests(self):
        """Run the fixed parameter suite (30 and 60 exercise memoization)."""
        impl, reference = fib, solved_fib  # change to proper function names
        for param in (4, 5, 6, 7, 8, 9, 30, 60):
            self.test(impl, reference, param)

    def test(self, func1, func2, n):
        """Compare func1(n) with func2(n), print the outcome and tally it."""
        x, y = func1(n), func2(n)
        if x == y:
            print(aethetics.GREEN + 'Test passed with param of ' + aethetics.BLUE + str(n) + aethetics.END + '\n' + aethetics.END)
            print(aethetics.BOLD + str(x) + aethetics.END + ' matches the answer ' + aethetics.BOLD + str(y) + aethetics.END)
            self.passed_tests += 1
        else:
            print(aethetics.FAIL + 'Test failed with param of ' + aethetics.BLUE + str(n) + aethetics.END + '\n' + aethetics.END)
            print(aethetics.BOLD + str(x) + aethetics.END + ' does not match the answer ' + aethetics.BOLD + str(y) + aethetics.END)
        self.total_tests += 1
        print(aethetics.LINE + '\n')
        sleep(0.7)

    def end(self):
        """Print the pass/total summary line."""
        if self.passed_tests == self.total_tests:
            print(aethetics.GREEN + 'All ' + str(self.total_tests) + ' tests passed.' + aethetics.END)
        else:
            print(aethetics.WARNING + 'Passed ' + str(self.passed_tests) + ' of ' + str(self.total_tests) + ' tests.' + aethetics.END)
if(__name__ == '__main__'):
    # Entry point: run the whole suite and print the summary.
    print('\n' + aethetics.GREEN + "Running Tests..." + aethetics.END)
    print(aethetics.LINE + '\n')
    t = TestCases()
    t.tests()
    t.end()
|
{"/tests/test_main.py": ["/solution/__init__.py"]}
|
11,024
|
haw230/dynamic-fibonacci
|
refs/heads/master
|
/solution/solution.py
|
# Cache of previously computed values, shared across calls.
memo = {}
def solved_fib(n):
    """Return the n-th Fibonacci number (f(0) == f(1) == 1), memoized."""
    if n < 2:
        return 1
    try:
        # EAFP: hit the cache first; most recursive calls land here.
        return memo[n]
    except KeyError:
        memo[n] = solved_fib(n - 1) + solved_fib(n - 2)
        return memo[n]
|
{"/tests/test_main.py": ["/solution/__init__.py"]}
|
11,025
|
haw230/dynamic-fibonacci
|
refs/heads/master
|
/solution/__init__.py
|
#obligatory __init__.py so Python knows to look here
|
{"/tests/test_main.py": ["/solution/__init__.py"]}
|
11,026
|
GustavoAT/back-end-challenge-2021
|
refs/heads/master
|
/tests/test_endpoints.py
|
from typing import List
from fastapi.testclient import TestClient
from app.main import app
# Shared in-process test client wired to the FastAPI app.
client = TestClient(app)
def test_main():
    '''Test root of API'''
    root = client.get('/')
    # The root endpoint only reports that the service is up.
    assert root.status_code == 200
    assert root.json()['message'] == 'REST Back-end Challenge 20201209 Running'
def test_get_users():
    '''Test the /users endpoint'''
    response = client.get('/users/')
    assert response.status_code == 200
    # isinstance instead of an exact type comparison (idiomatic type check).
    assert isinstance(response.json(), list)
def test_get_user():
    '''Test the /users/:user_id endpoint'''
    listing = client.get('/users/').json()
    if not listing:
        # Empty database: nothing to fetch (same silent skip as before).
        return
    detail = client.get(f'/users/{listing[0]["id"]}')
    assert detail.status_code == 200
def test_create_update_delete_user():
    '''Test creation, updating and deletion of an user'''
    payload = {
        "gender": "male",
        "name_first": "J",
        "name_last": "D",
        "email": "jon.doe@example.com",
        "login_uuid": "20c83553-1551-4e76-2234-4181ea561139",
        "login_username": "goodusername",
        "login_password": "mypassisnotsafe",
        "dob_date": "2000-04-01T15:52:08",
        "imported_t": "2021-08-06T21:20:00"
    }
    # Create the user and capture the generated id.
    created = client.post('/users/', json=payload)
    assert created.status_code == 200
    user_id = created.json()['id']
    # Update the last name and verify it round-trips.
    payload['name_last'] = 'Doe'
    updated = client.put(f'/users/{user_id}', json=payload)
    assert updated.status_code == 200
    assert updated.json()['name_last'] == 'Doe'
    # Delete and check the confirmation message.
    deleted = client.delete(f'/users/{user_id}')
    assert deleted.status_code == 200
    assert deleted.json()['message'] == f'Usuário de id {user_id} excluído'
|
{"/tests/test_endpoints.py": ["/app/main.py"], "/app/persistence/userDAO.py": ["/app/persistence/security/hash.py"], "/app/apiclient/randomuser.py": ["/app/persistence/models.py", "/app/persistence/pdmodels.py", "/app/persistence/database.py", "/app/persistence/userDAO.py", "/app/persistence/pagDAO.py"], "/app/persistence/models.py": ["/app/persistence/database.py", "/app/persistence/enumerators.py"], "/app/routes/users.py": ["/app/persistence/database.py"], "/app/persistence/pagDAO.py": ["/app/persistence/models.py"], "/app/persistence/pdmodels.py": ["/app/persistence/enumerators.py"]}
|
11,027
|
GustavoAT/back-end-challenge-2021
|
refs/heads/master
|
/app/persistence/userDAO.py
|
from sqlalchemy.orm import Session
from sqlalchemy import update
from . import models, pdmodels
from .security.hash import create_salt, get_hashed_password
def get_user(db: Session, user_id: int):
    """Return the User with ``user_id``, or None if it does not exist."""
    query = db.query(models.User).filter(models.User.id == user_id)
    return query.first()
def get_user_by_unique_data(db: Session, user: pdmodels.UserCreate):
    """Find a user matching email, username AND uuid; None if absent."""
    return db.query(models.User).filter(
        models.User.email == user.email,
        models.User.login_username == user.login_username,
        models.User.login_uuid == user.login_uuid,
    ).first()
def get_users(db: Session, skip: int = 0, limit: int = 20):
    """Return a page of users: skip ``skip`` rows, return at most ``limit``.

    Bug fix: ``slice(skip, limit)`` treats ``limit`` as the absolute stop
    index (so e.g. skip=20, limit=20 returned nothing); ``offset``/``limit``
    give the intended pagination semantics.
    """
    return db.query(models.User).offset(skip).limit(limit).all()
def create_user(db: Session, user: pdmodels.UserCreate):
    """Hash the incoming password (salting it if no salt was given) and insert."""
    if user.login_salt is None or user.login_salt == '':
        user.login_salt = create_salt()
    user.login_password = get_hashed_password(
        user.login_password, user.login_salt)
    record = models.User(**user.dict())
    db.add(record)
    db.commit()
    # Refresh to pick up database-generated fields (e.g. the primary key).
    db.refresh(record)
    return record
def update_user(db: Session, user_id: int, user: pdmodels.UserBase):
    """Overwrite all fields of row ``user_id`` and return the updated user."""
    stmt = (
        update(models.User)
        .where(models.User.id == user_id)
        .values(**user.dict())
        .execution_options(synchronize_session='fetch')
    )
    db.execute(stmt)
    db.commit()
    return get_user(db, user_id)
def upsert_user(db: Session, user: pdmodels.UserCreate):
    """Insert the user, or update the existing row matching its unique data."""
    if user.login_salt is None or user.login_salt == '':
        user.login_salt = create_salt()
    user.login_password = get_hashed_password(
        user.login_password, user.login_salt)
    target = get_user_by_unique_data(db, user)
    if target is not None:
        # Existing row: overwrite its fields in place.
        update_user(db, target.id, user)
        return target
    # No match: insert a fresh row.
    target = models.User(**user.dict())
    db.add(target)
    db.commit()
    db.refresh(target)
    return target
def delete_user(db: Session, user_id: int):
    """Delete the user row ``user_id``; True if a row was removed."""
    target = get_user(db, user_id)
    if target is None:
        return False
    db.delete(target)
    db.commit()
    return True
|
{"/tests/test_endpoints.py": ["/app/main.py"], "/app/persistence/userDAO.py": ["/app/persistence/security/hash.py"], "/app/apiclient/randomuser.py": ["/app/persistence/models.py", "/app/persistence/pdmodels.py", "/app/persistence/database.py", "/app/persistence/userDAO.py", "/app/persistence/pagDAO.py"], "/app/persistence/models.py": ["/app/persistence/database.py", "/app/persistence/enumerators.py"], "/app/routes/users.py": ["/app/persistence/database.py"], "/app/persistence/pagDAO.py": ["/app/persistence/models.py"], "/app/persistence/pdmodels.py": ["/app/persistence/enumerators.py"]}
|
11,028
|
GustavoAT/back-end-challenge-2021
|
refs/heads/master
|
/app/persistence/database.py
|
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from ..settings import SQLALCHEMY_DATABASE_URL
if 'sqlite://' in SQLALCHEMY_DATABASE_URL:
    # SQLite confines connections to their creating thread by default;
    # disable the check so the web framework can reuse them across threads.
    engine = create_engine(
        SQLALCHEMY_DATABASE_URL, connect_args={"check_same_thread": False}
    )
else:
    engine = create_engine(SQLALCHEMY_DATABASE_URL)
# Session factory and declarative base shared by the model modules.
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine,)
Base = declarative_base()
|
{"/tests/test_endpoints.py": ["/app/main.py"], "/app/persistence/userDAO.py": ["/app/persistence/security/hash.py"], "/app/apiclient/randomuser.py": ["/app/persistence/models.py", "/app/persistence/pdmodels.py", "/app/persistence/database.py", "/app/persistence/userDAO.py", "/app/persistence/pagDAO.py"], "/app/persistence/models.py": ["/app/persistence/database.py", "/app/persistence/enumerators.py"], "/app/routes/users.py": ["/app/persistence/database.py"], "/app/persistence/pagDAO.py": ["/app/persistence/models.py"], "/app/persistence/pdmodels.py": ["/app/persistence/enumerators.py"]}
|
11,029
|
GustavoAT/back-end-challenge-2021
|
refs/heads/master
|
/app/apiclient/randomuser.py
|
import sys
import requests
from pydantic import parse_obj_as
from ..persistence.models import Pag
from ..persistence.pdmodels import UserCreate
from ..persistence.database import SessionLocal
from ..persistence.userDAO import upsert_user
from ..persistence.pagDAO import get_first_pag, create_pag
from datetime import datetime
RANDOMUSER_URL = 'https://randomuser.me/api/'
RANDOMUSER_NESTED_LEVELS = 2
def get_users(n_users: int = 10, seed: str = None, page: int = None):
    """Fetch users from the randomuser API.

    Args:
        n_users (int, optional): Number of results. Defaults to 10.
        seed (str, optional): Seed to reproduce the same users. Defaults to
            None.
        page (int, optional): Page of results. Defaults to None.

    Returns:
        Response
    """
    # Restrict nationalities so names/addresses use a latin alphabet.
    query = {'nat': 'BR,CA,ES,GB,NZ,US'}
    if seed:
        query['seed'] = seed
    if page:
        query['page'] = page
    if n_users > 1:
        # The API returns a single user when 'results' is omitted.
        query['results'] = n_users
    return requests.get(RANDOMUSER_URL, params=query)
def save_random_users(n_users: int = 10):
    """Fetch *n_users* random users and persist them.

    Nothing is saved when the randomuser API request does not succeed.

    Args:
        n_users (int, optional): Number of users to get. Defaults to 10.
    """
    response = get_users(n_users)
    if response.status_code != 200:
        return
    save_users(response)
def save_random_users_paginated():
    """Save random users on the database following a pagination rule

    The pagination rule should be previously saved on the database.
    The actual page is retrieved and the next page is requested from the
    randomuser API. When all pages from the pagination rule are done,
    the next page wraps around to 1.

    Returns:
        int: Page number saved, or None when the API request failed.
    """
    with SessionLocal() as db:
        pagination = get_first_pag(db)
        # Records fetched so far under the current rule.
        total = pagination.actual_page * pagination.step
        step = pagination.step
        if total >= pagination.total_records:
            # Whole cycle done: start over from page 1.
            page = 1
        else:
            page = pagination.actual_page + 1
        total = page * step
        # The last page may overshoot the configured total; shrink the
        # request so a full cycle fetches exactly total_records users.
        difference = total - pagination.total_records
        if difference > 0:
            step -= difference
        result = get_users(step, pagination.seed, page)
        if result.status_code == 200:
            # Persist the new cursor position before importing the users.
            pagination.actual_page = page
            pagination.date = datetime.now()
            db.commit()
            save_users(result)
            return page
        return None
def save_users(request_result):
    """Persist every user contained in a randomuser API response.

    Each raw payload is flattened, stamped with the import time, parsed
    into a UserCreate model and upserted into the database.

    Args:
        request_result (Response): response object from randomuser API
    """
    prepared = []
    for raw_user in request_result.json()['results']:
        flat = flatten_many_levels(raw_user, RANDOMUSER_NESTED_LEVELS)
        prepared.append(add_imported_time(flat))
    users = parse_obj_as(list[UserCreate], prepared)
    with SessionLocal() as db:
        for user in users:
            upsert_user(db, user)
def flatten_one_level(target: dict):
    """Lift nested dictionaries one level up.

    Inner dictionaries are unpacked into the outer one; inner keys are
    prefixed with the outer key joined by an underscore
    (e.g. ``{'a': {'b': 1}}`` becomes ``{'a_b': 1}``).

    Args:
        target (dict): dictionary to be flattened

    Returns:
        dict: one level flattened dict
    """
    flat_dict = {}
    for key, value in target.items():
        # isinstance (rather than an exact type() comparison) also accepts
        # dict subclasses such as OrderedDict.
        if isinstance(value, dict):
            for inner_key, inner_value in value.items():
                flat_dict[key + '_' + inner_key] = inner_value
        else:
            flat_dict[key] = value
    return flat_dict


def flatten_many_levels(target: dict, levels: int = 2):
    """Flatten up to *levels* nested levels of a dictionary.

    Args:
        target (dict): dictionary to be flattened
        levels (int, optional): nested levels on the dictionary. Defaults to 2.

    Returns:
        dict: n levels flattened dict
    """
    flat_dict = target
    for _ in range(levels):
        flat_dict = flatten_one_level(flat_dict)
    return flat_dict
def add_imported_time(target: dict):
    """Stamp *target* in place with the current time under 'imported_t'."""
    now = datetime.now()
    target['imported_t'] = now
    return target
def init_pagination(step: int, total: int):
    """Create a pagination rule on the database.

    A single user is requested first only to obtain a seed, so that every
    later page draws from the same randomuser user set.

    Args:
        step (int): Records per page
        total (int): Total records to request
    """
    seed_probe = get_users(1)
    pagination = Pag()
    pagination.actual_page = 0
    pagination.step = step
    pagination.total_records = total
    pagination.date = datetime.now()
    pagination.seed = seed_probe.json()['info']['seed']
    with SessionLocal() as db:
        create_pag(db, pagination)
if __name__ == '__main__':
    # Simple CLI: `initpag <step> <total>` configures pagination,
    # `save <n>` imports n users, `savenextpage` imports the next page.
    if len(sys.argv) < 2:
        # Original code raised IndexError here; exit with a message instead.
        sys.exit('Informe um comando: initpag, save ou savenextpage')
    command = sys.argv[1]
    if command == 'initpag':
        if len(sys.argv) > 3:
            try:
                step = int(sys.argv[2])
                total = int(sys.argv[3])
                init_pagination(step, total)
                print(
                    f'Paginação configurada para {total} ' +
                    f'usuarios de {step} em {step}'
                )
            # int() raises ValueError (not TypeError) on non-numeric text,
            # so the original `except TypeError` never fired.
            except (TypeError, ValueError):
                print('Valores de passo e total devem ser inteiros positivos')
        else:
            print('Argumentos ausentes para esse comando')
    if command == 'save':
        try:
            number_users = int(sys.argv[2])
            save_random_users(number_users)
            print(f'{number_users} usuarios inseridos ou atualizados no banco')
        except (TypeError, ValueError):
            print('Insira um valor inteiro positivo como argumento')
    if command == 'savenextpage':
        r = save_random_users_paginated()
        if r:
            print(f'Página {r} salva')
        else:
            print(
                'Erro no salvamento, não foi possível ' +
                'obter usuários do randomuser'
            )
|
{"/tests/test_endpoints.py": ["/app/main.py"], "/app/persistence/userDAO.py": ["/app/persistence/security/hash.py"], "/app/apiclient/randomuser.py": ["/app/persistence/models.py", "/app/persistence/pdmodels.py", "/app/persistence/database.py", "/app/persistence/userDAO.py", "/app/persistence/pagDAO.py"], "/app/persistence/models.py": ["/app/persistence/database.py", "/app/persistence/enumerators.py"], "/app/routes/users.py": ["/app/persistence/database.py"], "/app/persistence/pagDAO.py": ["/app/persistence/models.py"], "/app/persistence/pdmodels.py": ["/app/persistence/enumerators.py"]}
|
11,030
|
GustavoAT/back-end-challenge-2021
|
refs/heads/master
|
/app/persistence/models.py
|
import sys
from sqlalchemy import Column, Integer, String, Enum, Float
from sqlalchemy import DateTime, SmallInteger
from .database import Base, engine
from .enumerators import GendersEnum, StatusEnum
class User(Base):
    """ORM model for an imported randomuser profile.

    Column names mirror the flattened randomuser API payload
    (nested keys joined by underscores, e.g. ``name_first``).
    """
    __tablename__ = "users"
    id = Column(Integer, primary_key=True, index=True)
    gender = Column(Enum(GendersEnum))
    name_title = Column(String(4))
    name_first = Column(String(30), nullable=False)
    name_last = Column(String(30), nullable=False)
    location_street = Column(String(40))
    location_city = Column(String(80))
    location_state = Column(String(40))
    location_postcode = Column(String(20))
    location_coordinates_latitude = Column(Float)
    location_coordinates_longitude = Column(Float)
    location_timezone_offset = Column(String(6))
    location_timezone_description = Column(String(80))
    email = Column(String(60), unique=True)
    login_uuid = Column(String(40), unique=True)
    login_username = Column(String(40), unique=True, nullable=False)
    # Presumably stores the hashed password plus its per-user salt — see
    # persistence/security/hash.py; confirm against userDAO.
    login_password = Column(String(70), nullable=False)
    login_salt = Column(String(20), nullable=False)
    dob_date = Column(DateTime, nullable=False)
    dob_age = Column(SmallInteger)
    registered_date = Column(DateTime)
    registered_age = Column(SmallInteger)
    phone = Column(String(15))
    cell = Column(String(15))
    id_name = Column(String(10))
    id_value = Column(String(20))
    picture_large = Column(String(60))
    picture_medium = Column(String(60))
    picture_thumbnail = Column(String(60))
    nat = Column(String(2))
    # Timestamp of when this record was imported from the API.
    imported_t = Column(DateTime, nullable=False)
    status = Column(Enum(StatusEnum), nullable=False)
class Pag(Base):
    """ORM model holding the pagination rule for scheduled imports."""
    __tablename__ = "pagination"
    id = Column(Integer, primary_key=True, index=True)
    # Seed sent to randomuser so every page draws from the same user set.
    seed = Column(String(40))
    actual_page = Column(Integer)
    step = Column(Integer)
    total_records = Column(Integer)
    date = Column(DateTime)
def create_tables():
    """Create every table declared on Base's metadata."""
    Base.metadata.create_all(bind=engine)
def delete_tables():
    """Drop every table declared on Base's metadata."""
    Base.metadata.drop_all(bind=engine)
if __name__ == '__main__':
    # CLI entry point: `createtables` drops and recreates every table.
    # The original `if sys.argv[1]:` raised IndexError when no argument was
    # given, so the else-branch message was unreachable.
    if len(sys.argv) > 1:
        if sys.argv[1] == 'createtables':
            # Recreate from scratch so schema changes take effect.
            delete_tables()
            create_tables()
            print('Tabelas criadas')
    else:
        print('Este comando espera pelo menos um argumento')
|
{"/tests/test_endpoints.py": ["/app/main.py"], "/app/persistence/userDAO.py": ["/app/persistence/security/hash.py"], "/app/apiclient/randomuser.py": ["/app/persistence/models.py", "/app/persistence/pdmodels.py", "/app/persistence/database.py", "/app/persistence/userDAO.py", "/app/persistence/pagDAO.py"], "/app/persistence/models.py": ["/app/persistence/database.py", "/app/persistence/enumerators.py"], "/app/routes/users.py": ["/app/persistence/database.py"], "/app/persistence/pagDAO.py": ["/app/persistence/models.py"], "/app/persistence/pdmodels.py": ["/app/persistence/enumerators.py"]}
|
11,031
|
GustavoAT/back-end-challenge-2021
|
refs/heads/master
|
/app/settingsexample.py
|
""" Environment settings
Edit database credentials
user, password, host and database on the url,
the APIkey and so on, then
rename the file to 'settings.py'.
The url is for MySQL, to use other database engines,
search documentation on SQLAlchemy:
https://docs.sqlalchemy.org/en/14/dialects/index.html
"""
#Use format:
#mysql+pymysql://[user]:[password]@[host][/database][?options]
SQLALCHEMY_DATABASE_URL = 'mysql+pymysql://root:rootpass@db/backendchallenge'
API_KEY = 'mysecretapikey'
|
{"/tests/test_endpoints.py": ["/app/main.py"], "/app/persistence/userDAO.py": ["/app/persistence/security/hash.py"], "/app/apiclient/randomuser.py": ["/app/persistence/models.py", "/app/persistence/pdmodels.py", "/app/persistence/database.py", "/app/persistence/userDAO.py", "/app/persistence/pagDAO.py"], "/app/persistence/models.py": ["/app/persistence/database.py", "/app/persistence/enumerators.py"], "/app/routes/users.py": ["/app/persistence/database.py"], "/app/persistence/pagDAO.py": ["/app/persistence/models.py"], "/app/persistence/pdmodels.py": ["/app/persistence/enumerators.py"]}
|
11,032
|
GustavoAT/back-end-challenge-2021
|
refs/heads/master
|
/app/routes/users.py
|
from typing import List
from fastapi import APIRouter
from fastapi import Depends, HTTPException
from fastapi.param_functions import Header
from sqlalchemy.orm import Session
from ..persistence.database import SessionLocal
from ..persistence import pdmodels, userDAO
from ..settings import API_KEY
router = APIRouter()
# Dependency
def get_db():
    """FastAPI dependency yielding a DB session that is always closed."""
    session = SessionLocal()
    try:
        yield session
    finally:
        session.close()
@router.get('/users/', response_model=List[pdmodels.User])
async def get_users(skip: int = 0, limit: int = 10,
                    db: Session = Depends(get_db), key: str = Header(None)):
    """Return a page of users; requires the API key header."""
    if key != API_KEY:
        raise HTTPException(status_code=401, detail='Não autorizado')
    return userDAO.get_users(db, skip, limit)
@router.post('/users/', response_model=pdmodels.User)
async def create_user(user: pdmodels.UserCreate,
                      db: Session = Depends(get_db), key: str = Header(None)):
    """Create a user, rejecting duplicates of any unique field."""
    if key != API_KEY:
        raise HTTPException(status_code=401, detail='Não autorizado')
    existing = userDAO.get_user_by_unique_data(db, user)
    if existing:
        raise HTTPException(status_code=400,
                            detail='e-mail, login ou uuid já cadastrado')
    return userDAO.create_user(db, user)
@router.get('/users/{user_id}', response_model=pdmodels.User)
async def get_user(user_id: int, db: Session = Depends(get_db),
                   key: str = Header(None)):
    """Fetch a single user by id; 404 when it does not exist."""
    if key != API_KEY:
        raise HTTPException(status_code=401, detail='Não autorizado')
    found = userDAO.get_user(db, user_id)
    if found is None:
        raise HTTPException(status_code=404, detail='Usuário não encontrado')
    return found
@router.put('/users/{user_id}')
async def update_user(user_id: int, user: pdmodels.UserBase,
                      db: Session = Depends(get_db), key: str = Header(None)):
    """Update an existing user; 404 when it does not exist."""
    if key != API_KEY:
        raise HTTPException(status_code=401, detail='Não autorizado')
    if userDAO.get_user(db, user_id) is None:
        raise HTTPException(status_code=404, detail='Usuário não encontrado')
    return userDAO.update_user(db, user_id, user)
@router.delete('/users/{user_id}')
async def delete_user(user_id: int, db: Session = Depends(get_db),
                      key: str = Header(None)):
    """Delete a user by id; 404 when it does not exist."""
    if key != API_KEY:
        raise HTTPException(status_code=401, detail='Não autorizado')
    if not userDAO.delete_user(db, user_id):
        raise HTTPException(status_code=404, detail='Usuário não encontrado')
    return {'message': f'Usuário de id {user_id} excluído'}
|
{"/tests/test_endpoints.py": ["/app/main.py"], "/app/persistence/userDAO.py": ["/app/persistence/security/hash.py"], "/app/apiclient/randomuser.py": ["/app/persistence/models.py", "/app/persistence/pdmodels.py", "/app/persistence/database.py", "/app/persistence/userDAO.py", "/app/persistence/pagDAO.py"], "/app/persistence/models.py": ["/app/persistence/database.py", "/app/persistence/enumerators.py"], "/app/routes/users.py": ["/app/persistence/database.py"], "/app/persistence/pagDAO.py": ["/app/persistence/models.py"], "/app/persistence/pdmodels.py": ["/app/persistence/enumerators.py"]}
|
11,033
|
GustavoAT/back-end-challenge-2021
|
refs/heads/master
|
/app/persistence/pagDAO.py
|
from sqlalchemy.orm import Session
from .models import Pag
def get_pag(db: Session, pag_id: int):
    """Return the pagination row with the given id, or None."""
    query = db.query(Pag).filter(Pag.id == pag_id)
    return query.first()
def get_first_pag(db: Session):
    """Return the first pagination row found, or None if none exists."""
    return db.query(Pag).first()
def create_pag(db: Session, pagination: Pag):
    """Persist a new pagination row and return it refreshed (with its id)."""
    db.add(pagination)
    db.commit()
    db.refresh(pagination)
    return pagination
def update_pag(db: Session, pagination: Pag):
    """Copy every field of *pagination* onto the stored row with the same id.

    Returns:
        Pag: the refreshed database row.
    """
    db_pag = db.query(Pag).filter(Pag.id == pagination.id).first()
    for field in ('seed', 'actual_page', 'step', 'total_records', 'date'):
        setattr(db_pag, field, getattr(pagination, field))
    db.commit()
    db.refresh(db_pag)
    return db_pag
def delete_pag(db: Session, pag_id: int):
    """Delete the pagination row with the given id.

    Args:
        db (Session): database session
        pag_id (int): id of the pagination row

    Returns:
        bool: True when a row was found and deleted, False otherwise.
            (Previously returned None, so callers could not tell whether
            anything was deleted; this mirrors userDAO.delete_user as used
            by the routes.)
    """
    db_pag = get_pag(db, pag_id)
    if not db_pag:
        return False
    db.delete(db_pag)
    db.commit()
    return True
|
{"/tests/test_endpoints.py": ["/app/main.py"], "/app/persistence/userDAO.py": ["/app/persistence/security/hash.py"], "/app/apiclient/randomuser.py": ["/app/persistence/models.py", "/app/persistence/pdmodels.py", "/app/persistence/database.py", "/app/persistence/userDAO.py", "/app/persistence/pagDAO.py"], "/app/persistence/models.py": ["/app/persistence/database.py", "/app/persistence/enumerators.py"], "/app/routes/users.py": ["/app/persistence/database.py"], "/app/persistence/pagDAO.py": ["/app/persistence/models.py"], "/app/persistence/pdmodels.py": ["/app/persistence/enumerators.py"]}
|
11,034
|
GustavoAT/back-end-challenge-2021
|
refs/heads/master
|
/app/main.py
|
from fastapi import FastAPI
from .routes import users
app = FastAPI()
# Mount the /users CRUD endpoints.
app.include_router(users.router)


@app.get('/')
async def root():
    """Health-check endpoint confirming the service is running."""
    return {'message': 'REST Back-end Challenge 20201209 Running'}
|
{"/tests/test_endpoints.py": ["/app/main.py"], "/app/persistence/userDAO.py": ["/app/persistence/security/hash.py"], "/app/apiclient/randomuser.py": ["/app/persistence/models.py", "/app/persistence/pdmodels.py", "/app/persistence/database.py", "/app/persistence/userDAO.py", "/app/persistence/pagDAO.py"], "/app/persistence/models.py": ["/app/persistence/database.py", "/app/persistence/enumerators.py"], "/app/routes/users.py": ["/app/persistence/database.py"], "/app/persistence/pagDAO.py": ["/app/persistence/models.py"], "/app/persistence/pdmodels.py": ["/app/persistence/enumerators.py"]}
|
11,035
|
GustavoAT/back-end-challenge-2021
|
refs/heads/master
|
/app/persistence/enumerators.py
|
from enum import Enum
class GendersEnum(str, Enum):
    """Genders as provided by the randomuser API."""
    male = 'male'
    female = 'female'
class StatusEnum(str, Enum):
    """Publication status of an imported user record."""
    draft = 'draft'
    trash = 'trash'
    published = 'published'
|
{"/tests/test_endpoints.py": ["/app/main.py"], "/app/persistence/userDAO.py": ["/app/persistence/security/hash.py"], "/app/apiclient/randomuser.py": ["/app/persistence/models.py", "/app/persistence/pdmodels.py", "/app/persistence/database.py", "/app/persistence/userDAO.py", "/app/persistence/pagDAO.py"], "/app/persistence/models.py": ["/app/persistence/database.py", "/app/persistence/enumerators.py"], "/app/routes/users.py": ["/app/persistence/database.py"], "/app/persistence/pagDAO.py": ["/app/persistence/models.py"], "/app/persistence/pdmodels.py": ["/app/persistence/enumerators.py"]}
|
11,036
|
GustavoAT/back-end-challenge-2021
|
refs/heads/master
|
/app/persistence/pdmodels.py
|
from datetime import datetime
from typing import Optional
from pydantic import BaseModel
from .enumerators import GendersEnum, StatusEnum
class UserBase(BaseModel):
    """Shared user fields exchanged with the API.

    Field names follow the flattened randomuser payload (nested keys
    joined by underscores, e.g. ``name_first``).
    """
    gender: GendersEnum
    name_title: Optional[str] = None
    name_first: Optional[str] = None
    name_last: Optional[str] = None
    location_street: Optional[str] = None
    location_city: Optional[str] = None
    location_state: Optional[str] = None
    location_postcode: Optional[str] = None
    location_coordinates_latitude: Optional[float] = None
    location_coordinates_longitude: Optional[float] = None
    location_timezone_offset: Optional[str] = None
    location_timezone_description: Optional[str] = None
    email: str
    login_uuid: str
    login_username: str
    dob_date: datetime
    dob_age: Optional[int] = None
    registered_date: Optional[datetime] = None
    registered_age: Optional[int] = None
    phone: Optional[str] = None
    cell: Optional[str] = None
    id_name: Optional[str] = None
    id_value: Optional[str] = None
    picture_large: Optional[str] = None
    picture_medium: Optional[str] = None
    picture_thumbnail: Optional[str] = None
    nat: Optional[str] = None
    # Timestamp of when the record was imported from the API.
    imported_t: datetime
    status: StatusEnum = StatusEnum.draft
class UserCreate(UserBase):
    """User payload for creation; includes write-only credential fields."""
    login_password: str
    login_salt: Optional[str] = None
class User(UserBase):
    """User as returned by the API, including the database id."""
    id: int

    class Config:
        # Allow construction directly from ORM model instances.
        orm_mode = True
|
{"/tests/test_endpoints.py": ["/app/main.py"], "/app/persistence/userDAO.py": ["/app/persistence/security/hash.py"], "/app/apiclient/randomuser.py": ["/app/persistence/models.py", "/app/persistence/pdmodels.py", "/app/persistence/database.py", "/app/persistence/userDAO.py", "/app/persistence/pagDAO.py"], "/app/persistence/models.py": ["/app/persistence/database.py", "/app/persistence/enumerators.py"], "/app/routes/users.py": ["/app/persistence/database.py"], "/app/persistence/pagDAO.py": ["/app/persistence/models.py"], "/app/persistence/pdmodels.py": ["/app/persistence/enumerators.py"]}
|
11,037
|
GustavoAT/back-end-challenge-2021
|
refs/heads/master
|
/app/persistence/security/hash.py
|
"""Hash and cryptografy functions.
Helper on password and secrets security;
"""
import string
from hashlib import sha256
import secrets
# Characters allowed in generated salts.
ALPHABET = string.ascii_letters + string.digits


def get_hashed_password(password: str, salt: str):
    """Return the SHA-256 hex digest of *password* concatenated with *salt*."""
    return sha256((password + salt).encode()).hexdigest()


def create_salt(size: int = 8):
    """Generate a random alphanumeric salt of the given length."""
    return ''.join(secrets.choice(ALPHABET) for _ in range(size))
|
{"/tests/test_endpoints.py": ["/app/main.py"], "/app/persistence/userDAO.py": ["/app/persistence/security/hash.py"], "/app/apiclient/randomuser.py": ["/app/persistence/models.py", "/app/persistence/pdmodels.py", "/app/persistence/database.py", "/app/persistence/userDAO.py", "/app/persistence/pagDAO.py"], "/app/persistence/models.py": ["/app/persistence/database.py", "/app/persistence/enumerators.py"], "/app/routes/users.py": ["/app/persistence/database.py"], "/app/persistence/pagDAO.py": ["/app/persistence/models.py"], "/app/persistence/pdmodels.py": ["/app/persistence/enumerators.py"]}
|
11,038
|
messente/verigator-python
|
refs/heads/master
|
/messente/verigator/routes.py
|
URL = "https://api.verigator.com"
CREATE_SERVICE = "v1/service/service"
GET_SERVICE = "v1/service/service/{}"
DELETE_SERVICE = "v1/service/service/{}"
GET_USERS = "v1/service/service/{}/users"
GET_USER = "v1/service/service/{}/users/{}"
CREATE_USER = "v1/service/service/{}/users"
DELETE_USER = "v1/service/service/{}/users/{}"
AUTH_INITIATE = "v1/service/service/{}/users/{}/auth"
AUTH_VERIFY = "v1/service/service/{}/users/{}/auth"
|
{"/examples/example.py": ["/messente/verigator/api.py"]}
|
11,039
|
messente/verigator-python
|
refs/heads/master
|
/messente/verigator/test/test_users.py
|
from unittest import TestCase
from mock import MagicMock
from messente.verigator import routes, client, controllers, exceptions
class TestUsers(TestCase):
    """Unit tests for controllers.Users against a mocked RestClient."""

    def setUp(self):
        self.rest_client = client.RestClient("http://test", "test", "test")
        self.users = controllers.Users(self.rest_client)
        # Canned API response used by every test below.
        self.sample_response = {
            "id_in_service": "test2",
            "ctime": "2017-09-15T08:13:26.965341",
            "id": "38fb335c-025d-45eb-9cf2-d2d4d9f54203"
        }

    def tearDown(self):
        pass

    def test_create(self):
        # create() must POST to the user-creation route and map the response.
        self.rest_client.post = MagicMock(return_value=self.sample_response)
        res = self.users.create("service_id", "0123", "username")
        self.rest_client.post.assert_called_with(routes.CREATE_USER.format("service_id"),
                                                 json={"id_in_service": "username", "phone_number": "0123"})
        self.assertEqual(res.id, self.sample_response['id'])
        self.assertEqual(res.creation_time, self.sample_response['ctime'])
        self.assertEqual(res.username, self.sample_response['id_in_service'])

    def test_get(self):
        self.rest_client.get = MagicMock(return_value=self.sample_response)
        res = self.users.get("sid", "uid")
        self.rest_client.get.assert_called_with(routes.GET_USER.format("sid", "uid"))
        self.assertEqual(res.id, self.sample_response['id'])
        self.assertEqual(res.creation_time, self.sample_response['ctime'])
        self.assertEqual(res.username, self.sample_response['id_in_service'])

    def test_get_all(self):
        # The list endpoint wraps users in a {"users": [...]} envelope.
        self.rest_client.get = MagicMock(return_value={"users": [self.sample_response]})
        res = self.users.get_all("sid")
        self.rest_client.get.assert_called_with(routes.GET_USERS.format("sid"))
        self.assertEqual(len(res), 1)
        self.assertEqual(res[0].id, self.sample_response['id'])
        self.assertEqual(res[0].creation_time, self.sample_response['ctime'])
        self.assertEqual(res[0].username, self.sample_response['id_in_service'])

    def test_delete(self):
        self.rest_client.delete = MagicMock(return_value=self.sample_response)
        res = self.users.delete("sid", "uid")
        self.rest_client.delete.assert_called_with(routes.DELETE_USER.format("sid", "uid"))
        self.assertTrue(res)

    def test_create_failed(self):
        # Client exceptions must propagate out of the controller untouched.
        self.rest_client.post = MagicMock(side_effect=exceptions.ResourceAlreadyExistsError(409, "message"))
        try:
            self.users.create("", "", "")
        except exceptions.ResourceAlreadyExistsError as e:
            self.assertEqual(e.code, 409)
            self.assertEqual(e.message, "message")
        else:
            self.fail("Exception not raised")

    def test_invalid_input(self):
        # Non-string arguments are rejected by the validation decorators.
        self.assertRaises(ValueError, controllers.Users, None)
        self.assertRaises(ValueError, self.users.create, None, None, None)
        self.assertRaises(ValueError, self.users.get, None, None)
        self.assertRaises(ValueError, self.users.get_all, None)
        self.assertRaises(ValueError, self.users.delete, None, None)
|
{"/examples/example.py": ["/messente/verigator/api.py"]}
|
11,040
|
messente/verigator-python
|
refs/heads/master
|
/messente/verigator/client.py
|
import requests
from messente.verigator import exceptions
class RestClient(object):
    """Simple http client that handles authentication and content-type
    by default for post and put calls. Default headers are
    content-type: application/json and accept: application/json, however they can be overridden.
    Note: If server returns any other status code except 2xx, client will raise appropriate exception
    Attributes:
        endpoint (str): server url, any other paths will be appended to it
        auth_header (dict): default headers for each request (contains only auth header)
    """
    def __init__(self, endpoint, username, password):
        """
        Args:
            endpoint (str): server url, any other paths will be appended to it
            username (str): used for authentication
            password (str): used for authentication
        """
        self.endpoint = endpoint
        # Credentials travel in a single custom header as "user:pass".
        self.auth_header = {
            "X-Service-Auth": ":".join([username, password])
        }
        self.content_type_headers = {
            "Content-Type": "application/json",
            "Accept": "application/json"
        }

    def get(self, path, params=None, headers=None):
        """
        Wrapper around requests get method
        Args:
            path (str): request path
            params (dict): url parameters
            headers (dict): additional headers
        Returns:
            dict: response body
        """
        new_headers = self.__merge_dicts(self.auth_header, headers)
        return self._request("GET", self.__url(path), params=params, headers=new_headers)

    def post(self, path, headers=None, json=None):
        """
        Wrapper around requests post method
        Args:
            path (str): request path
            headers (dict): additional headers
            json (dict): request payload
        Returns:
            dict: response body
        """
        # Caller-supplied headers are merged last so they win over defaults.
        new_headers = self.__merge_dicts(self.auth_header, self.content_type_headers)
        new_headers = self.__merge_dicts(new_headers, headers)
        return self._request("POST", self.__url(path), headers=new_headers, json=json)

    def put(self, path, headers=None, json=None):
        """
        Wrapper around requests put method
        Args:
            path (str): request path
            headers (dict): additional headers
            json (dict): request payload
        Returns:
            dict: response body
        """
        new_headers = self.__merge_dicts(self.auth_header, self.content_type_headers)
        new_headers = self.__merge_dicts(new_headers, headers)
        return self._request("PUT", self.__url(path), headers=new_headers, json=json)

    def delete(self, path, headers=None):
        """
        Wrapper around requests delete method
        Args:
            path (str): request path
            headers (dict): additional headers
        Returns:
            dict: response body
        """
        new_headers = self.__merge_dicts(self.auth_header, headers)
        return self._request("DELETE", self.__url(path), headers=new_headers)

    def __url(self, path):
        # Join endpoint and path with exactly one slash between them.
        return "/".join([self.endpoint.strip("/"), path])

    @staticmethod
    def __merge_dicts(first, second):
        """Merge two dicts, tolerating None for either argument (EAFP)."""
        try:
            new_headers = first.copy()
        except AttributeError:
            # first is None (or not a dict): start from an empty dict.
            new_headers = {}
        try:
            new_headers.update(second)
        except TypeError:
            # second is None: nothing to merge.
            pass
        return new_headers

    @staticmethod
    def _request(method, path, params=None, headers=None, json=None):
        """Perform the HTTP call and map error status codes to exceptions.

        Returns the decoded JSON body on 2xx; raises a specific subclass of
        VerigatorError for known error codes and VerigatorError for any
        other non-2xx status.
        """
        resp = requests.request(method, path, params=params, headers=headers, json=json)
        status_code = resp.status_code
        try:
            resp_json = resp.json()
        except ValueError:
            # Body was not JSON at all — surface the raw text.
            raise exceptions.InvalidResponseError(0, resp.text)
        message = resp_json.get('message', None)
        if status_code == 400:
            raise exceptions.InvalidDataError(400, message)
        elif status_code == 401:
            raise exceptions.WrongCredentialsError(401, message)
        elif status_code == 403:
            raise exceptions.ResourceForbiddenError(403, message)
        elif status_code == 404:
            raise exceptions.NoSuchResourceError(404, message)
        elif status_code == 409:
            raise exceptions.ResourceAlreadyExistsError(409, message)
        elif status_code == 422:
            raise exceptions.InvalidDataError(422, message)
        elif status_code == 500:
            raise exceptions.InternalError(500, resp_json)
        elif 300 <= status_code <= 600:
            # Any other non-2xx (including redirects) is treated as an error.
            raise exceptions.VerigatorError(status_code, resp_json)
        return resp_json
|
{"/examples/example.py": ["/messente/verigator/api.py"]}
|
11,041
|
messente/verigator-python
|
refs/heads/master
|
/messente/verigator/test/test_api.py
|
from unittest import TestCase
from messente.verigator import api
class ApiTest(TestCase):
    """Smoke test: the Api facade exposes its three controllers."""

    def setUp(self):
        self.api = api.Api("", "")

    def test_contains_required_libs(self):
        # Controllers must be constructed even with blank credentials.
        self.assertTrue(self.api.auth)
        self.assertTrue(self.api.services)
        self.assertTrue(self.api.users)
|
{"/examples/example.py": ["/messente/verigator/api.py"]}
|
11,042
|
messente/verigator-python
|
refs/heads/master
|
/examples/example.py
|
from messente.verigator.api import Api
# initialize api client with account credentials
api = Api("username", "password")
# create example service
service = api.services.create("http://example.com", "service_name")
# add user to the created service
user = api.users.create(service.id, "+xxxxxxxxxxx", "username")
# initiate sms authentication; use api.auth.METHOD_TOTP for time-based codes
api.auth.initiate(service.id, user.id, api.auth.METHOD_SMS)
# check user input until successful pin verification
while True:
    try:
        input = raw_input  # Python 2 compatibility: py2's raw_input == py3's input
    except NameError:
        pass
    # read user input
    token = input("Enter Sms Pin: ")
    # verify pin
    verified = api.auth.verify(service.id, user.id, token)
    if verified:
        break
    print("Not Verified...")
print("Verified Successfully!")
|
{"/examples/example.py": ["/messente/verigator/api.py"]}
|
11,043
|
messente/verigator-python
|
refs/heads/master
|
/messente/verigator/test/test_services.py
|
from unittest import TestCase
from mock import MagicMock
from messente.verigator import routes, client, controllers, exceptions
class TestServices(TestCase):
    """Unit tests for controllers.Services against a mocked RestClient."""

    def setUp(self):
        self.rest_client = client.RestClient("https://test", "test", "test")
        self.services = controllers.Services(self.rest_client)
        # Canned API response used by every test below.
        self.sample_response = {
            "id": "id",
            "ctime": "2017-09-15T06:44:15.274438",
            "name": "name"
        }

    def tearDown(self):
        pass

    def test_create(self):
        # create() must POST the fqdn/name payload and map the response.
        self.rest_client.post = MagicMock(return_value=self.sample_response)
        res = self.services.create("domain", "name")
        self.rest_client.post.assert_called_with(routes.CREATE_SERVICE, json={"fqdn": "domain", "name": "name"})
        self.assertEqual(res.id, self.sample_response['id'])
        self.assertEqual(res.creation_time, self.sample_response['ctime'])
        self.assertEqual(res.name, self.sample_response['name'])

    def test_get(self):
        self.rest_client.get = MagicMock(return_value=self.sample_response)
        res = self.services.get("id")
        self.rest_client.get.assert_called_with(routes.GET_SERVICE.format("id"))
        self.assertEqual(res.id, self.sample_response['id'])
        self.assertEqual(res.creation_time, self.sample_response['ctime'])
        self.assertEqual(res.name, self.sample_response['name'])

    def test_delete(self):
        self.rest_client.delete = MagicMock(return_value=self.sample_response)
        res = self.services.delete("id")
        self.rest_client.delete.assert_called_with(routes.DELETE_SERVICE.format("id"))
        self.assertTrue(res)

    def test_create_failed(self):
        # Client exceptions must propagate out of the controller untouched.
        self.rest_client.post = MagicMock(side_effect=exceptions.ResourceAlreadyExistsError(409, "message"))
        try:
            self.services.create("", "")
        except exceptions.ResourceAlreadyExistsError as e:
            self.assertEqual(e.code, 409)
            self.assertEqual(e.message, "message")
        else:
            self.fail("Exception not raised")

    def test_invalid_input(self):
        # Non-string arguments are rejected by the validation decorators.
        # NOTE(review): some calls pass one extra positional arg — the
        # validator still raises ValueError before the arity is checked.
        self.assertRaises(ValueError, controllers.Services, None)
        self.assertRaises(ValueError, self.services.create, None, None, None)
        self.assertRaises(ValueError, self.services.get, None, None)
        self.assertRaises(ValueError, self.services.delete, None, None)
|
{"/examples/example.py": ["/messente/verigator/api.py"]}
|
11,044
|
messente/verigator-python
|
refs/heads/master
|
/messente/verigator/test/test_auth.py
|
from unittest import TestCase
from mock import MagicMock
from messente.verigator import client, controllers, routes
class TestAuth(TestCase):
    """Unit tests for controllers.Auth against a mocked RestClient."""

    def setUp(self):
        self.rest_client = client.RestClient("http://test", "test", "test")
        self.auth = controllers.Auth(self.rest_client)
        # Canned API responses used by the tests below.
        self.sms_init_response = {
            "method": "sms"
        }
        self.totp_init_response = {
            "method": "totp"
        }
        self.verified_response = {
            "verified": True
        }
        self.failed_response = {
            "verified": False,
            "status": {
                "throttled": False,
                "expired": False,
                "invalid": True,
                "result": "INVALID"
            },
        }

    def test_initiate_sms(self):
        # initiate() POSTs the chosen method to the auth route.
        self.rest_client.post = MagicMock(return_value=self.sms_init_response)
        self.auth.initiate("sid", "uid", self.auth.METHOD_SMS)
        self.rest_client.post.assert_called_with(routes.AUTH_INITIATE.format("sid", "uid"),
                                                 json={"method": "sms"})

    def test_initiate_totp(self):
        self.rest_client.post = MagicMock(return_value=self.totp_init_response)
        self.auth.initiate("sid", "uid", self.auth.METHOD_TOTP)
        self.rest_client.post.assert_called_with(routes.AUTH_INITIATE.format("sid", "uid"),
                                                 json={"method": "totp"})

    def test_verify_sms(self):
        # verify() PUTs the token and returns the "verified" flag.
        self.rest_client.put = MagicMock(return_value=self.verified_response)
        verified = self.auth.verify("sid", "uid", "token")
        self.rest_client.put.assert_called_with(routes.AUTH_VERIFY.format("sid", "uid"),
                                                json={"token": "token"})
        self.assertTrue(verified)

    def test_verify_totp(self):
        self.rest_client.put = MagicMock(return_value=self.verified_response)
        verified = self.auth.verify("sid", "uid", "token")
        self.rest_client.put.assert_called_with(routes.AUTH_VERIFY.format("sid", "uid"),
                                                json={"token": "token"})
        self.assertTrue(verified)

    def test_verify_failed(self):
        self.rest_client.put = MagicMock(return_value=self.failed_response)
        verified = self.auth.verify("sid", "uid", "token")
        self.rest_client.put.assert_called_with(routes.AUTH_VERIFY.format("sid", "uid"),
                                                json={"token": "token"})
        self.assertFalse(verified)

    def test_invalid_input(self):
        # Non-string arguments are rejected by the validation decorators.
        self.assertRaises(ValueError, controllers.Auth, None)
        self.assertRaises(ValueError, self.auth.initiate, None, None, None)
        self.assertRaises(ValueError, self.auth.verify, None, None, None, None)
|
{"/examples/example.py": ["/messente/verigator/api.py"]}
|
11,045
|
messente/verigator-python
|
refs/heads/master
|
/messente/verigator/controllers.py
|
"""
Controllers module of the api
"""
import sys
from functools import wraps
from messente.verigator import routes, models, client
PY2 = sys.version_info.major == 2


def _validate_input(func):
    """Decorator ensuring every positional argument after self is a string."""
    @wraps(func)
    def wrapper(*args):
        # Lazily pick the text type so `basestring` is only touched on py2.
        text_type = str if not PY2 else basestring
        for value in args[1:]:
            if not isinstance(value, text_type):
                raise ValueError("{} should be string".format(value))
        return func(*args)
    return wrapper
def _validate_client(func):
    """Decorator rejecting any constructor argument that is not a RestClient."""
    @wraps(func)
    def wrapper(self, rest_client):
        if isinstance(rest_client, client.RestClient):
            return func(self, rest_client)
        raise ValueError("client should be RestClient")
    return wrapper
class Services(object):
    """
    Controller for the service resource.
    """

    @_validate_client
    def __init__(self, rest_client):
        """
        Args:
            rest_client (client.RestClient): low-level HTTP client
        """
        self.rest_client = rest_client

    @_validate_input
    def create(self, domain, name):
        """Create a new service.

        Args:
            domain (str): the domain name
            name (str): the name of the service

        Returns:
            models.Service: the created service
        """
        payload = {
            'fqdn': domain,
            'name': name
        }
        data = self.rest_client.post(routes.CREATE_SERVICE, json=payload)
        return self.__service_from_json(data)

    @_validate_input
    def get(self, id):
        """Fetch the service with the given id from the server.

        Args:
            id (str): the id of the service

        Returns:
            models.Service: the fetched service
        """
        data = self.rest_client.get(routes.GET_SERVICE.format(id))
        return self.__service_from_json(data)

    @_validate_input
    def delete(self, id):
        """Delete the service with the given id.

        Args:
            id (str): service id

        Returns:
            bool: True on success (errors surface as exceptions)
        """
        self.rest_client.delete(routes.DELETE_SERVICE.format(id))
        return True

    @staticmethod
    def __service_from_json(json):
        # translate a raw API payload into a model object
        return models.Service(json['id'], json['ctime'], json['name'])
# noinspection PyShadowingBuiltins
class Users(object):
    """Controller for the user resource.
    """

    @_validate_client
    def __init__(self, rest_client):
        """
        Args:
            rest_client (client.RestClient): low-level HTTP client
        """
        self.rest_client = rest_client

    @_validate_input
    def get_all(self, service_id):
        """Fetch every user registered under the given service.

        Args:
            service_id (str): service id to search users for

        Returns:
            list[models.User]: list of users
        """
        data = self.rest_client.get(routes.GET_USERS.format(service_id))
        return [self.__user_from_json(entry) for entry in data['users']]

    @_validate_input
    def get(self, service_id, id):
        """Fetch a single user of the given service.

        Args:
            service_id (str): service id
            id (str): user id

        Returns:
            models.User: the fetched user
        """
        data = self.rest_client.get(routes.GET_USER.format(service_id, id))
        return self.__user_from_json(data)

    @_validate_input
    def create(self, service_id, number, username):
        """Register a new user under the given service.

        Args:
            service_id (str): service id
            number (str): user's phone number, used for 2FA
            username (str): username

        Returns:
            models.User: the created user
        """
        payload = {
            "phone_number": number,
            "id_in_service": username
        }
        data = self.rest_client.post(routes.CREATE_USER.format(service_id), json=payload)
        return self.__user_from_json(data)

    @_validate_input
    def delete(self, service_id, id):
        """Delete the user with the given id from the given service.

        Args:
            service_id (str): service id
            id (str): user id

        Returns:
            bool: True on success, raises an exception on error
        """
        self.rest_client.delete(routes.DELETE_USER.format(service_id, id))
        return True

    @staticmethod
    def __user_from_json(json):
        # translate a raw API payload into a model object
        return models.User(json['id'], json['ctime'], json['id_in_service'])
class Auth(object):
    """Controller for the authentication resource.
    """

    METHOD_SMS = "sms"
    METHOD_TOTP = "totp"

    @_validate_client
    def __init__(self, rest_client):
        """
        Args:
            rest_client (client.RestClient): low-level HTTP client
        """
        self.rest_client = rest_client

    @_validate_input
    def initiate(self, service_id, user_id, method):
        """Start an authentication attempt; sends an SMS for the sms method.

        Args:
            service_id (str): service id
            user_id (str): user id
            method (str): auth method, use Auth.METHOD_SMS or Auth.METHOD_TOTP

        Note:
            The backend automatically falls back from TOTP to SMS when the
            user has no device attached to the number.

        Returns:
            str: the 2FA method actually used ("sms" or "totp")
        """
        response = self.rest_client.post(
            routes.AUTH_INITIATE.format(service_id, user_id),
            json={"method": method})
        return response['method']

    @_validate_input
    def verify(self, service_id, user_id, token):
        """Check a user-supplied token against the server.

        Args:
            service_id (str): service id
            user_id (str): user id
            token (str): user-provided token

        Returns:
            bool: whether the token was accepted
        """
        response = self.rest_client.put(
            routes.AUTH_VERIFY.format(service_id, user_id),
            json={"token": token})
        return response['verified']
|
{"/examples/example.py": ["/messente/verigator/api.py"]}
|
11,046
|
messente/verigator-python
|
refs/heads/master
|
/messente/verigator/models.py
|
class Service(object):
    """Value object describing a single verigator service.

    Attributes:
        id (str): unique id
        creation_time (str): creation time
        name (str): name of the service
    """
    def __init__(self, id, creation_time, name):
        self.id = id
        self.creation_time = creation_time
        self.name = name

    def __repr__(self):
        # debug-friendly representation; mirrors the constructor
        return "Service(id={!r}, creation_time={!r}, name={!r})".format(
            self.id, self.creation_time, self.name)

    def __eq__(self, other):
        """Two services are equal when all three attributes match."""
        if not isinstance(other, Service):
            return NotImplemented
        return (self.id, self.creation_time, self.name) == \
               (other.id, other.creation_time, other.name)

    def __ne__(self, other):
        # explicit inverse for Python 2 compatibility (the package targets both)
        result = self.__eq__(other)
        return result if result is NotImplemented else not result

    def __hash__(self):
        return hash((self.id, self.creation_time, self.name))
class User(object):
    """Value object describing a single verigator user.

    Attributes:
        id (str): unique id
        creation_time (str): creation time
        username (str): name of the user
    """
    def __init__(self, id, creation_time, username):
        self.id = id
        self.creation_time = creation_time
        self.username = username

    def __repr__(self):
        # debug-friendly representation; mirrors the constructor
        return "User(id={!r}, creation_time={!r}, username={!r})".format(
            self.id, self.creation_time, self.username)

    def __eq__(self, other):
        """Two users are equal when all three attributes match."""
        if not isinstance(other, User):
            return NotImplemented
        return (self.id, self.creation_time, self.username) == \
               (other.id, other.creation_time, other.username)

    def __ne__(self, other):
        # explicit inverse for Python 2 compatibility (the package targets both)
        result = self.__eq__(other)
        return result if result is NotImplemented else not result

    def __hash__(self):
        return hash((self.id, self.creation_time, self.username))
|
{"/examples/example.py": ["/messente/verigator/api.py"]}
|
11,047
|
messente/verigator-python
|
refs/heads/master
|
/messente/verigator/api.py
|
"""
Api description
"""
from messente.verigator import routes, client, controllers
class Api(object):
    """Entry point of the verigator api; bundles the resource controllers.

    Attributes:
        services (controllers.Services): controller for the service resource
        users (controllers.Users): controller for the user resource
        auth (controllers.Auth): controller for the auth resource
    """

    def __init__(self, username, password, endpoint=routes.URL):
        """
        Initialize the Verigator api.

        Args:
            username (str): api username, obtainable from the dashboard
            password (str): api password, obtainable from the dashboard
            endpoint (str): api endpoint, obtainable from the dashboard
        """
        # one shared HTTP client backs all three controllers
        http = client.RestClient(endpoint, username, password)
        self.auth = controllers.Auth(http)
        self.services = controllers.Services(http)
        self.users = controllers.Users(http)
|
{"/examples/example.py": ["/messente/verigator/api.py"]}
|
11,048
|
messente/verigator-python
|
refs/heads/master
|
/setup.py
|
# -*- coding: utf-8 -*-
from setuptools import setup

# Packaging metadata for the verigator client library.
setup(
    name="verigator",
    version="1.0.4",
    packages=["messente.verigator"],
    # NOTE(review): requests is only a runtime dependency; listing it in
    # setup_requires as well is probably unnecessary -- confirm before removing.
    setup_requires=["requests==2.18.4"],
    install_requires=["requests==2.18.4"],
    # requests-mock and mock are needed only by the test suite below
    tests_require=["requests-mock==1.3.0", "mock==2.0.0"],
    author="Verigator.com",
    author_email="admin@verigator.com",
    description="Official Verigator.com API library",
    license="Apache License, Version 2",
    keywords="verigator messente sms verification 2FA pin code",
    url="http://messente.com/documentation/",
    test_suite="messente.verigator.test"
)
|
{"/examples/example.py": ["/messente/verigator/api.py"]}
|
11,049
|
messente/verigator-python
|
refs/heads/master
|
/messente/verigator/test/test_restClient.py
|
from unittest import TestCase
import requests_mock
from messente.verigator import exceptions, client
@requests_mock.mock()
class TestRestClient(TestCase):
    """Tests RestClient request construction, JSON handling and error mapping.

    The class decorator injects a ``request_mocker`` argument into every
    test method; no real HTTP traffic occurs.
    """
    def setUp(self):
        self.rest_client = client.RestClient("http://test", "test", "test")
        # credentials travel as a single "user:password" auth header
        self.valid_headers = {
            "X-Service-Auth": "test:test"
        }
        self.valid_get_headers = self.valid_headers
        # write-style requests additionally declare JSON in/out
        self.valid_post_headers = {
            "X-Service-Auth": "test:test",
            "Content-Type": "application/json",
            "Accept": "application/json"
        }
        self.sample_response = {
            "foo": "bar"
        }

    def test_auth_header(self, request_mocker):
        """Every request carries the X-Service-Auth header."""
        request_mocker.get("http://test/", request_headers=self.valid_headers, json={})
        self.rest_client.get("")
        self.assertTrue(request_mocker.called)

    def test_get(self, request_mocker):
        """GET forwards query params and extra headers, returns parsed JSON."""
        request_mocker.get("http://test/test?foo=bar", complete_qs=True, request_headers={"foo": "bar"},
                           json=self.sample_response)
        json = self.rest_client.get("test", params={"foo": "bar"}, headers={"foo": "bar"})
        self.assertTrue(request_mocker.called)
        self.assertEqual(json, self.sample_response)

    def test_post(self, request_mocker):
        """POST sends the JSON body with the JSON headers and parses the reply."""
        request_mocker.post("http://test/test", json=self.sample_response, request_headers=self.valid_post_headers)
        json = self.rest_client.post("test", json=self.sample_response)
        self.assertTrue(request_mocker.called)
        self.assertEqual(json, self.sample_response)
        self.assertEqual(request_mocker.request_history[0].json(), self.sample_response)

    def test_put(self, request_mocker):
        """PUT behaves like POST with respect to body, headers and parsing."""
        request_mocker.put("http://test/test", json=self.sample_response, request_headers=self.valid_post_headers)
        json = self.rest_client.put("test", json=self.sample_response)
        self.assertTrue(request_mocker.called)
        self.assertEqual(json, self.sample_response)
        self.assertEqual(request_mocker.request_history[0].json(), self.sample_response)

    def test_delete(self, request_mocker):
        """DELETE uses the plain auth headers and parses the reply."""
        request_mocker.delete("http://test/test", json=self.sample_response, request_headers=self.valid_headers)
        json = self.rest_client.delete("test")
        self.assertTrue(request_mocker.called)
        self.assertEqual(json, self.sample_response)

    def test_raises_errors(self, request_mocker):
        """Each HTTP status code maps to its dedicated exception type."""
        self._assertAllRoutesRaises(exceptions.InternalError, request_mocker, 500)
        self._assertAllRoutesRaises(exceptions.InvalidDataError, request_mocker, 400)
        self._assertAllRoutesRaises(exceptions.WrongCredentialsError, request_mocker, 401)
        self._assertAllRoutesRaises(exceptions.ResourceForbiddenError, request_mocker, 403)
        self._assertAllRoutesRaises(exceptions.NoSuchResourceError, request_mocker, 404)
        self._assertAllRoutesRaises(exceptions.ResourceAlreadyExistsError, request_mocker, 409)
        # an unmapped status code falls back to the base error
        self._assertAllRoutesRaises(exceptions.VerigatorError, request_mocker, 447)

    def test_non_json_response(self, request_mocker):
        """A 200 reply that is not JSON raises InvalidResponseError."""
        request_mocker.register_uri('GET', "http://test/test", text="Some non json response", status_code=200)
        self.assertRaises(exceptions.InvalidResponseError, self.rest_client.get, "test")

    def _assertAllRoutesRaises(self, exception, request_mocker, code):
        # helper: all four verbs must raise the same exception for this code
        self._register_addresses(request_mocker, code)
        self.assertRaises(exception, self.rest_client.get, "test")
        self.assertRaises(exception, self.rest_client.post, "test")
        self.assertRaises(exception, self.rest_client.put, "test")
        self.assertRaises(exception, self.rest_client.delete, "test")

    @staticmethod
    def _register_addresses(request_mocker, code):
        # helper: register the same status code on every verb
        request_mocker.register_uri('GET', 'http://test/test', json={}, status_code=code)
        request_mocker.register_uri('POST', 'http://test/test', json={}, status_code=code)
        request_mocker.register_uri('PUT', 'http://test/test', json={}, status_code=code)
        request_mocker.register_uri('DELETE', 'http://test/test', json={}, status_code=code)
|
{"/examples/example.py": ["/messente/verigator/api.py"]}
|
11,050
|
messente/verigator-python
|
refs/heads/master
|
/messente/verigator/exceptions.py
|
"""
Exceptions module of the api
"""
class VerigatorError(Exception):
    """
    Base error class for all verigator related errors.

    Attributes:
        code: error code reported by the API
        message (str): human-readable error description
    """
    def __init__(self, code, message):
        super(VerigatorError, self).__init__(message)
        self.code = code
        self.message = message


# The subclasses below add no behavior of their own -- they exist so callers
# can catch specific failure modes.  Their former __init__ methods were exact
# pass-throughs to the base class and have been removed; the inherited
# (code, message) constructor is unchanged for callers.

class InvalidDataError(VerigatorError):
    """
    Raised when the provided data is invalid.
    """


class NoSuchResourceError(VerigatorError):
    """
    Raised when the resource that you were looking for does not exist.
    """


class ResourceAlreadyExistsError(VerigatorError):
    """
    Raised when you are creating a resource that already exists.
    """


class ResourceForbiddenError(VerigatorError):
    """
    Raised when you don't have permission to access the resource.
    """


class WrongCredentialsError(VerigatorError):
    """
    Raised when you provided invalid credentials.
    Please see the messente dashboard for the correct username and password.
    """


class InternalError(VerigatorError):
    """
    Raised when there is a problem on the server side.
    """


class InvalidResponseError(VerigatorError):
    """
    Raised when the server returned a non-JSON response.
    """
|
{"/examples/example.py": ["/messente/verigator/api.py"]}
|
11,110
|
Oliver0047/Campus-Chatbot
|
refs/heads/master
|
/preprocessing.py
|
# -*- coding: UTF-8 -*-
#autor:Oliver
import jieba
class preprocessing():
    """Builds vocabularies and index-vector files from raw question/answer text."""
    __PAD__ = 0  # padding symbol
    __EOS__ = 1  # end-of-sequence symbol
    __GO__ = 2  # start-of-sequence symbol
    __UNK__ = 3  # unknown-word symbol
    # the four special symbols always occupy the first vocabulary slots
    vocab = ['__PAD__', '__EOS__', '__GO__', '__UNK__']

    def __init__(self):
        self.encoderFile = "./data/question.txt"  # questions
        self.decoderFile = "./data/answer.txt"  # answers
        self.savePath = './data/'  # output directory
        # load the supplementary jieba dictionary used for Chinese segmentation
        jieba.load_userdict("./data/supplementvocab.txt")

    def wordToVocabulary(self, originFile, vocabFile, segementFile):
        """Segment originFile line by line, writing tokens to segementFile and
        the deduplicated vocabulary (special symbols first) to vocabFile."""
        vocabulary = []
        sege = open(segementFile, "w", encoding='utf-8')
        with open(originFile, 'r', encoding='utf-8') as en:
            for sent in en.readlines():
                if "enc" in segementFile:
                    words = jieba.lcut(sent.strip())  # jieba segmentation, returns a list
                    print(words)
                else:
                    words = jieba.lcut(sent.strip())
                vocabulary.extend(words)  # accumulate the raw vocabulary
                for word in words:  # persist this line's segmentation result
                    sege.write(word + " ")
                sege.write("\n")
        sege.close()
        # deduplicate (preserving first-seen order) and store the dictionary
        vocab_file = open(vocabFile, "w", encoding='utf-8')
        _vocabulary = list(set(vocabulary))
        _vocabulary.sort(key=vocabulary.index)
        _vocabulary = self.vocab + _vocabulary  # prepend the special symbols
        if "enc" in segementFile:
            print('encode_vocab_length: ', len(_vocabulary))
        else:
            print('decode_vocab_length: ', len(_vocabulary))
        for index, word in enumerate(_vocabulary):
            vocab_file.write(word + "\n")
        vocab_file.close()

    def toVec(self, segementFile, vocabFile, doneFile):
        """Map each segmented line of segementFile to a line of vocabulary
        indices in doneFile, using vocabFile as the word -> index table."""
        word_dicts = {}
        vec = []
        with open(vocabFile, "r", encoding='utf-8') as dict_f:  # build the word -> index table
            for index, word in enumerate(dict_f.readlines()):
                word_dicts[word.strip()] = index
        f = open(doneFile, "w", encoding='utf-8')
        # when only unknown symbols are entered, answer with the unknown symbol
        if "enc.vec" in doneFile:
            f.write("3 3 3 3\n")
            f.write("3\n")
        elif "dec.vec" in doneFile:
            f.write(str(word_dicts.get("other", 3)) + "\n")
            f.write(str(word_dicts.get("other", 3)) + "\n")
        with open(segementFile, "r", encoding='utf-8') as sege_f:
            for sent in sege_f.readlines():
                sents = [i.strip() for i in sent.split(" ")[:-1]]
                vec.extend(sents)
                for word in sents:
                    f.write(str(word_dicts.get(word)) + " ")  # word -> index
                f.write("\n")
        f.close()

    def main(self):
        # build the dictionaries
        self.wordToVocabulary(self.encoderFile, self.savePath + 'enc.vocab', self.savePath + 'enc.segement')
        self.wordToVocabulary(self.decoderFile, self.savePath + 'dec.vocab', self.savePath + 'dec.segement')
        # convert to index vectors
        self.toVec(self.savePath + "enc.segement",
                   self.savePath + "enc.vocab",
                   self.savePath + "enc.vec")
        self.toVec(self.savePath + "dec.segement",
                   self.savePath + "dec.vocab",
                   self.savePath + "dec.vec")
if __name__ == '__main__':
    # build vocabularies and index vectors for both questions and answers
    pre = preprocessing()
    pre.main()
|
{"/html2py.py": ["/seq2seq.py"]}
|
11,111
|
Oliver0047/Campus-Chatbot
|
refs/heads/master
|
/html2py.py
|
# -*- coding: UTF-8 -*-
#autor:Oliver
from flask import jsonify
import json
from seq2seq import seq2seq
from flask import Flask, render_template, request
import warnings
warnings.filterwarnings("ignore")
import sys
sys.path.append('.')
# module-level singletons: the chatbot model and the Flask application
chat = seq2seq()
app = Flask('chatbot')
@app.route('/')
def gethtml():
    """Serve the chat page and (re)load the seq2seq model state."""
    chat.prepare()
    return render_template('index.html')
@app.route('/predict',methods=['POST','GET'])
def predict():
    """Answer a chat question posted as JSON ``{"question": ...}``.

    Returns:
        flask JSON response with body ``{"result": <answer>}``
    """
    mydata = json.loads(request.get_data())
    data = mydata['question']
    try:
        pred = chat.predict_one(data)
    # was a bare ``except:`` -- that also swallowed SystemExit and
    # KeyboardInterrupt; Exception keeps the intended retry behavior
    except Exception:
        # model not prepared (or stale): reload once and retry
        chat.prepare()
        pred = chat.predict_one(data)
    return jsonify(result=pred)
if __name__=='__main__':
    # listen on all interfaces; port 5010
    app.run(host="0.0.0.0",port=5010)
|
{"/html2py.py": ["/seq2seq.py"]}
|
11,112
|
Oliver0047/Campus-Chatbot
|
refs/heads/master
|
/seq2seq.py
|
# -*- coding: UTF-8 -*-
#autor:Oliver
import os
import random
import sys
import time
import jieba
import numpy as np
import pandas as pd
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch import optim
from torch.autograd import Variable
import sys
sys.path.append('.')
USE_CUDA = torch.cuda.is_available()  # use the GPU for computation when one is available
EOS_token = 1  # end-of-sequence symbol
SOS_token = 2  # start-of-sequence symbol


def _vocab_size(path):
    """Return the number of entries (lines) in a vocabulary file."""
    # with-statement guarantees the file is closed (the original opened,
    # read, flushed and closed by hand, duplicated for both files)
    with open(path, 'r', encoding='utf-8') as vocab_file:
        return len(vocab_file.readlines())


enc_len = _vocab_size('data/enc.vocab')  # encoder vocabulary size
dec_len = _vocab_size('data/dec.vocab')  # decoder vocabulary size
class EncoderRNN(nn.Module):
    """Single-direction GRU encoder processing one sentence (batch size 1)."""

    def __init__(self, input_size, hidden_size, n_layers=1):
        super(EncoderRNN, self).__init__()
        # input_size: size of the encoder vocabulary (one-hot dimension of
        # each word index in the question)
        self.input_size = input_size
        self.hidden_size = hidden_size
        self.n_layers = n_layers
        # index -> dense word vector
        self.embedding = nn.Embedding(input_size, hidden_size)
        # gated recurrent unit stack
        self.gru = nn.GRU(hidden_size, hidden_size, n_layers)

    def forward(self, word_inputs, hidden):
        """Encode a (seq_len,) index tensor; returns (outputs, final hidden)."""
        # (seq_len,) indices -> (seq_len, 1, hidden) embedded sequence
        embedded = self.embedding(word_inputs).view(len(word_inputs), 1, -1)
        return self.gru(embedded, hidden)

    def init_hidden(self):
        """Fresh all-zero hidden state of shape (n_layers, 1, hidden_size)."""
        hidden = Variable(torch.zeros(self.n_layers, 1, self.hidden_size))
        return hidden.cuda() if USE_CUDA else hidden
class Attn(nn.Module):  # attention mechanism
    def __init__(self, method, hidden_size, max_length):
        # method: 'dot', 'general' or 'concat'; max_length is accepted but unused here
        super(Attn, self).__init__()
        self.method = method
        self.hidden_size = hidden_size
        if self.method == 'general':
            self.attn = nn.Linear(self.hidden_size, hidden_size)
        elif self.method == 'concat':
            self.attn = nn.Linear(self.hidden_size * 2, hidden_size)
            self.other = nn.Parameter(torch.FloatTensor(1, hidden_size))

    def forward(self, hidden, encoder_outputs):
        """Return softmax-normalized attention weights over the encoder outputs."""
        seq_len = len(encoder_outputs)
        attn_energies = Variable(torch.zeros(seq_len))
        if USE_CUDA: attn_energies = attn_energies.cuda()
        for i in range(seq_len):
            attn_energies[i] = self.score(hidden, encoder_outputs[i])  # compute raw weights
        return F.softmax(attn_energies).unsqueeze(0).unsqueeze(0)  # normalize the weights with softmax

    def score(self, hidden, encoder_output):
        # raw (unnormalized) attention energy for one encoder position
        if self.method == 'dot':
            energy = torch.dot(hidden.view(-1), encoder_output.view(-1))
            return energy
        elif self.method == 'general':
            energy = self.attn(encoder_output)
            # NOTE(review): the next line overwrites the self.attn result with a
            # plain dot product, so the 'general' linear transform is effectively
            # unused -- presumably torch.dot(hidden.view(-1), energy.view(-1))
            # was intended.  Fixing it would change behavior of already-trained
            # checkpoints, so it is only flagged here; confirm before changing.
            energy = torch.dot(hidden.view(-1), encoder_output.view(-1))  # torch.dot multiplies elementwise then sums, unlike numpy
            return energy
class AttnDecoderRNN(nn.Module):  # decoder with the attention mechanism added
    def __init__(self, attn_model, hidden_size, output_size, n_layers=1, dropout_p=0.1, max_length=10):
        super(AttnDecoderRNN, self).__init__()
        self.attn_model = attn_model  # attention scoring method, or 'none' to disable
        self.hidden_size = hidden_size
        self.output_size = output_size  # decoder vocabulary size
        self.n_layers = n_layers
        self.dropout_p = dropout_p
        self.max_length = max_length
        self.embedding = nn.Embedding(output_size, hidden_size)
        # input is [word embedding ; previous context], hence hidden_size * 2
        self.gru = nn.GRU(hidden_size * 2, hidden_size, n_layers, dropout=dropout_p)
        self.out = nn.Linear(hidden_size * 2, output_size)
        if attn_model != 'none':
            self.attn = Attn(attn_model, hidden_size, self.max_length)

    def forward(self, word_input, last_context, last_hidden, encoder_outputs):
        """One decoding step; returns (log-probs, new context, new hidden, attention weights)."""
        word_embedded = self.embedding(word_input).view(1, 1, -1)  # embed the decoder input
        rnn_input = torch.cat((word_embedded, last_context.unsqueeze(0)), 2)  # concatenate the embedding with the previous context vector
        rnn_output, hidden = self.gru(rnn_input, last_hidden)  # rnn_output is the decoder's current context state
        attn_weights = self.attn(rnn_output.squeeze(0), encoder_outputs)  # compute new attention weights from that state
        context = attn_weights.bmm(encoder_outputs.transpose(0, 1))  # form the new context vector
        rnn_output = rnn_output.squeeze(0)
        context = context.squeeze(1)
        output = F.log_softmax(self.out(torch.cat((rnn_output, context), 1)))  # output distribution from the rnn state and the context
        return output, context, hidden, attn_weights
class seq2seq(nn.Module):
    """Attention seq2seq chatbot: data loading, training, greedy and beam-search decoding."""
    def __init__(self):
        super(seq2seq, self).__init__()
        self.max_epoches = 5000  # maximum number of training steps
        self.batch_index = 0  # start from the 0th question/answer pair
        self.GO_token = 2
        self.EOS_token = 1
        self.input_size = 1500  # encoder vocabulary size
        self.output_size = 1500  # decoder vocabulary size
        self.hidden_size = 1024
        self.max_length = 15  # maximum sentence length
        self.show_epoch = 100  # display training data once every hundred steps
        self.use_cuda = USE_CUDA
        self.model_path = "./model/"
        self.n_layers = 1
        self.dropout_p = 0.05
        self.beam_search = True  # use beam search when decoding
        self.top_k = 5  # keep the 5 most probable sequences
        self.alpha = 0.5  # length penalty factor
        self.enc_vec = []  # encoder index sequences
        self.dec_vec = []  # decoder index sequences
        # initialize encoder and decoder
        self.encoder = EncoderRNN(self.input_size, self.hidden_size, self.n_layers)
        self.decoder = AttnDecoderRNN('general', self.hidden_size, self.output_size, self.n_layers, self.dropout_p, self.max_length)
        if USE_CUDA:
            self.encoder = self.encoder.cuda()
            self.decoder = self.decoder.cuda()
        # set up the optimizers
        self.encoder_optimizer = optim.Adam(self.encoder.parameters())
        self.decoder_optimizer = optim.Adam(self.decoder.parameters())
        # set up the loss function
        self.criterion = nn.NLLLoss()

    def loadData(self):  # load the encoder and decoder index data
        with open("./data/enc.vec") as enc:
            line = enc.readline()
            while line:
                self.enc_vec.append(line.strip().split())
                line = enc.readline()
        with open("./data/dec.vec") as dec:
            line = dec.readline()
            while line:
                self.dec_vec.append(line.strip().split())
                line = dec.readline()

    def next(self, batch_size, eos_token=1, go_token=2, shuffle=False):  # take one batch of data
        inputs = []
        targets = []
        if shuffle:  # pick one line at random
            ind = random.choice(range(len(self.enc_vec)))
            enc = [self.enc_vec[ind]]
            dec = [self.dec_vec[ind]]
        else:  # take one batch in order
            if self.batch_index+batch_size >= len(self.enc_vec):
                enc = self.enc_vec[self.batch_index:]
                dec = self.dec_vec[self.batch_index:]
                self.batch_index = 0
            else:
                enc = self.enc_vec[self.batch_index:self.batch_index+batch_size]
                dec = self.dec_vec[self.batch_index:self.batch_index+batch_size]
                self.batch_index += batch_size
        # NOTE(review): the loop below rebinds enc/dec to the FIRST row on its
        # first iteration, so only batch_size 1 behaves as intended (train()
        # always calls next(1)) -- confirm before using larger batches.
        for index in range(len(enc)):
            # clip to the maximum length
            enc = enc[0][:self.max_length] if len(enc[0]) > self.max_length else enc[0]
            dec = dec[0][:self.max_length] if len(dec[0]) > self.max_length else dec[0]
            enc = [int(i) for i in enc]
            dec = [int(i) for i in dec]
            dec.append(eos_token)  # append the end symbol to the decoder data
            inputs.append(enc)
            targets.append(dec)
        inputs = Variable(torch.LongTensor(inputs)).transpose(1, 0).contiguous()  # wrap as Variable, contiguous in memory
        targets = Variable(torch.LongTensor(targets)).transpose(1, 0).contiguous()
        if USE_CUDA:
            inputs = inputs.cuda()
            targets = targets.cuda()
        return inputs, targets

    def train(self):  # training loop
        self.loadData()
        try:  # resume from a saved model when one exists
            self.load_state_dict(torch.load(self.model_path+'params.pkl'))
        except Exception as e:
            print(e)
            print("No model!")
        loss_track = []
        for epoch in range(self.max_epoches):
            start = time.time()
            inputs, targets = self.next(1, shuffle=False)  # take one sample
            loss, logits = self.step(inputs, targets, self.max_length)  # returns the loss value and the outputs
            loss_track.append(loss)
            _,v = torch.topk(logits, 1)  # take the most probable output
            pre = v.cpu().data.numpy().T.tolist()[0][0]
            tar = targets.cpu().data.numpy().T.tolist()[0]
            stop = time.time()
            if epoch % self.show_epoch == 0:
                print("-"*50)
                print("epoch:", epoch)
                print(" loss:", loss)
                print(" target:%s\n output:%s" % (tar, pre))
                print(" per-time:", (stop-start))
                torch.save(self.state_dict(), self.model_path+'params.pkl')

    def step(self, input_variable, target_variable, max_length):  # forward pass, backward pass and parameter update for one sample
        teacher_forcing_ratio = 0.1
        clip = 5.0  # gradient clipping against exploding gradients, a frequent RNN problem
        loss = 0
        # zero the gradients before each training step
        self.encoder_optimizer.zero_grad()
        self.decoder_optimizer.zero_grad()
        input_length = input_variable.size()[0]
        target_length = target_variable.size()[0]
        encoder_hidden = self.encoder.init_hidden()
        encoder_outputs, encoder_hidden = self.encoder(input_variable, encoder_hidden)  # encode
        decoder_input = Variable(torch.LongTensor([[SOS_token]]))
        decoder_context = Variable(torch.zeros(1, self.decoder.hidden_size))
        decoder_hidden = encoder_hidden
        if USE_CUDA:
            decoder_input = decoder_input.cuda()
            decoder_context = decoder_context.cuda()
        decoder_outputs = []
        use_teacher_forcing = random.random() < teacher_forcing_ratio  # randomly switch modes
        use_teacher_forcing = True  # NOTE(review): unconditionally overrides the random choice above
        if use_teacher_forcing:  # feed the ground-truth label as the next decoder input
            for di in range(target_length):
                decoder_output, decoder_context, decoder_hidden, decoder_attention = self.decoder(decoder_input, decoder_context, decoder_hidden, encoder_outputs)  # decode
                loss += self.criterion(decoder_output, target_variable[di])  # accumulate the loss
                decoder_input = target_variable[di]
                decoder_outputs.append(decoder_output.unsqueeze(0))
        else:  # feed the current decoder output as the next decoder input
            for di in range(target_length):
                decoder_output, decoder_context, decoder_hidden, decoder_attention = self.decoder(decoder_input, decoder_context, decoder_hidden, encoder_outputs)
                loss += self.criterion(decoder_output, target_variable[di])
                decoder_outputs.append(decoder_output.unsqueeze(0))
                topv, topi = decoder_output.data.topk(1)
                ni = topi[0][0]
                decoder_input = Variable(torch.LongTensor([[ni]]))
                if USE_CUDA: decoder_input = decoder_input.cuda()
                if ni == EOS_token: break
        loss.backward()  # backpropagate the gradients
        torch.nn.utils.clip_grad_norm(self.encoder.parameters(), clip)  # clip the gradients
        torch.nn.utils.clip_grad_norm(self.decoder.parameters(), clip)
        self.encoder_optimizer.step()  # update the parameters
        self.decoder_optimizer.step()
        decoder_outputs = torch.cat(decoder_outputs, 0)  # decoder outputs
        return loss.data[0] / target_length, decoder_outputs

    def input_deal(self, input_vec):  # clip the encoder input vector to length and wrap it as a Variable
        inputs = []
        enc = input_vec[:self.max_length] if len(input_vec) > self.max_length else input_vec  # clip to the maximum length
        inputs.append(enc)
        inputs = Variable(torch.LongTensor(inputs)).transpose(1, 0).contiguous()  # wrap as Variable
        if USE_CUDA:
            inputs = inputs.cuda()
        return inputs

    def prepare(self):
        """Load the saved model (if any) and the word/index dictionaries."""
        try:
            self.load_state_dict(torch.load(self.model_path+'params.pkl'))  # load the model when one exists
        except Exception as e:
            print(e)
            print("No model!")
        # load the dictionaries
        self.str_to_vec = {}
        with open("./data/enc.vocab") as enc_vocab:
            for index,word in enumerate(enc_vocab.readlines()):
                self.str_to_vec[word.strip()] = index
        self.vec_to_str = {}
        with open("./data/dec.vocab") as dec_vocab:
            for index,word in enumerate(dec_vocab.readlines()):
                self.vec_to_str[index] = word.strip()

    def predict_one(self,data):
        """Answer one question string with beam search; call prepare() first."""
        # string to index vector
        segement = jieba.lcut(data.strip())
        input_vec = [self.str_to_vec.get(i, 3) for i in segement]
        input_vec = self.input_deal(input_vec)  # vector processing
        samples = self.beamSearchDecoder(input_vec)  # top-5 most probable results
        samples.sort(key=lambda x:-x[3])
        sample=samples[0]  # take the most probable sequence
        outstrs = []
        for i in sample[0]:
            if i == 1:
                break
            outstrs.append(self.vec_to_str.get(i, "Un"))  # indices to characters
        if ("Un" in outstrs) or ("__UNK__" in outstrs):
            return "风太大,我听不见><"
        return "".join(outstrs)

    def predict(self):  # interactive prediction loop
        try:
            self.load_state_dict(torch.load(self.model_path+'params.pkl'))  # load the model when one exists
        except Exception as e:
            print(e)
            print("No model!")
        loss_track = []
        # load the dictionaries
        str_to_vec = {}
        with open("./data/enc.vocab",encoding='utf-8') as enc_vocab:
            for index,word in enumerate(enc_vocab.readlines()):
                str_to_vec[word.strip()] = index
        vec_to_str = {}
        with open("./data/dec.vocab",encoding='utf-8') as dec_vocab:
            for index,word in enumerate(dec_vocab.readlines()):
                vec_to_str[index] = word.strip()
        while True:
            input_strs = input(">> ")
            # string to index vector
            segement = jieba.lcut(input_strs)
            input_vec = [str_to_vec.get(i, 3) for i in segement]
            input_vec = self.input_deal(input_vec)  # vector processing
            # choose the decoding strategy
            if self.beam_search:  # use beam search
                samples = self.beamSearchDecoder(input_vec)  # top-5 most probable results
                samples.sort(key=lambda x:-x[3])
                sample=samples[0]  # take the most probable sequence
                outstrs = []
                for i in sample[0]:
                    if i == 1:
                        break
                    outstrs.append(vec_to_str.get(i, "Un"))  # indices to characters
                print("小电 > ", "".join(outstrs))
            else:  # plain greedy decoding
                logits = self.normal_search(input_vec)  # pick the most probable character at each step to form the sequence
                _,v = torch.topk(logits, 1)
                pre = v.cpu().data.numpy().T.tolist()[0][0]
                outstrs = []
                for i in pre:
                    if i == 1:
                        break
                    outstrs.append(vec_to_str.get(i, "Un"))
                print("小电 > ", "".join(outstrs))

    def normal_search(self, input_variable):  # greedy decoding: most probable character at each step
        input_length = input_variable.size()[0]
        encoder_hidden = self.encoder.init_hidden()
        encoder_outputs, encoder_hidden = self.encoder(input_variable, encoder_hidden)
        decoder_input = Variable(torch.LongTensor([[SOS_token]]))
        decoder_context = Variable(torch.zeros(1, self.decoder.hidden_size))
        decoder_hidden = encoder_hidden
        if USE_CUDA:
            decoder_input = decoder_input.cuda()
            decoder_context = decoder_context.cuda()
        decoder_outputs = []
        for i in range(self.max_length):
            decoder_output, decoder_context, decoder_hidden, decoder_attention = self.decoder(decoder_input, decoder_context, decoder_hidden, encoder_outputs)
            decoder_outputs.append(decoder_output.unsqueeze(0))
            topv, topi = decoder_output.data.topk(1)
            ni = topi[0][0]
            decoder_input = Variable(torch.LongTensor([[ni]]))  # feed the current output back in as the next decoder input
            if USE_CUDA: decoder_input = decoder_input.cuda()
            if ni == EOS_token: break
        decoder_outputs = torch.cat(decoder_outputs, 0)
        return decoder_outputs

    def tensorToList(self, tensor):  # tensor to list
        return tensor.cpu().data.numpy().tolist()[0]

    def beamSearchDecoder(self, input_variable):  # beam search algorithm
        input_length = input_variable.size()[0]
        encoder_hidden = self.encoder.init_hidden()
        encoder_outputs, encoder_hidden = self.encoder(input_variable, encoder_hidden)
        decoder_input = Variable(torch.LongTensor([[SOS_token]]))
        decoder_context = Variable(torch.zeros(1, self.decoder.hidden_size))
        decoder_hidden = encoder_hidden
        if USE_CUDA:
            decoder_input = decoder_input.cuda()
            decoder_context = decoder_context.cuda()
        decoder_output, decoder_context, decoder_hidden, decoder_attention = self.decoder(decoder_input, decoder_context, decoder_hidden, encoder_outputs)
        topk = decoder_output.data.topk(self.top_k)  # feed the start symbol; take the top-k output characters and their data
        samples = [[] for i in range(self.top_k)]
        dead_k = 0
        final_samples = []
        for index in range(self.top_k):  # store the top-k output characters with their scores, context vectors, etc.
            topk_prob = topk[0][0][index]
            topk_index = int(topk[1][0][index])
            samples[index] = [[topk_index], topk_prob, 0, 0, decoder_context, decoder_hidden, decoder_attention, encoder_outputs]
        for _ in range(self.max_length):
            tmp = []
            for index in range(len(samples)):
                tmp.extend(self.beamSearchInfer(samples[index], index))  # extend each stored sequence by one character, keeping the top-k outputs
            samples = []
            # filter down to the top-k
            df = pd.DataFrame(tmp)  # wrap in a DataFrame
            df.columns = ['sequence', 'pre_socres', 'fin_scores', "ave_scores", "decoder_context", "decoder_hidden", "decoder_attention", "encoder_outputs"]
            sequence_len = df.sequence.apply(lambda x:len(x))  # sequence lengths
            df['ave_scores'] = df['fin_scores'] / sequence_len  # average score
            df = df.sort_values('ave_scores', ascending=False).reset_index().drop(['index'], axis=1)  # sort by average score, descending
            df = df[:(self.top_k-dead_k)]  # keep at most top_k sequences, minus the finished ones
            for index in range(len(df)):
                # NOTE(review): DataFrame.ix is removed in modern pandas -- .iloc/.loc needed there
                group = df.ix[index]  # the sequence and its data
                if group.tolist()[0][-1] == 1:  # the sequence ends with the end symbol
                    final_samples.append(group.tolist())  # add it to the finished set
                    df = df.drop([index], axis=0)  # drop it from the live set
                    dead_k += 1  # one fewer live sequence still needed
                    #print("drop {}, {}".format(group.tolist()[0], dead_k))
            samples = df.values.tolist()
            if len(samples) == 0:  # nothing left to extend, we can stop
                break
        if len(final_samples) < self.top_k:
            final_samples.extend(samples[:(self.top_k-dead_k)])  # not enough finished sequences: pad with the most probable unfinished ones
        return final_samples

    def beamSearchInfer(self, sample, k):  # extend a known sequence by one output character and score it
        samples = []
        decoder_input = Variable(torch.LongTensor([[sample[0][-1]]]))
        if USE_CUDA:
            decoder_input = decoder_input.cuda()
        sequence, pre_scores, fin_scores, ave_scores, decoder_context, decoder_hidden, decoder_attention, encoder_outputs = sample
        decoder_output, decoder_context, decoder_hidden, decoder_attention = self.decoder(decoder_input, decoder_context, decoder_hidden, encoder_outputs)
        # choose topk
        topk = decoder_output.data.topk(self.top_k)
        for k in range(self.top_k):
            topk_prob = topk[0][0][k]  # probability of this character
            topk_index = int(topk[1][0][k])  # index of this character
            pre_scores += topk_prob  # accumulate the score
            fin_scores = pre_scores - (k - 1 ) * self.alpha  # apply the length penalty factor
            # updated beam entry
            samples.append([sequence+[topk_index], pre_scores, fin_scores, ave_scores, decoder_context, decoder_hidden, decoder_attention, encoder_outputs])
        return samples

    def retrain(self):  # train from scratch
        try:
            # NOTE(review): model_path is a directory; os.remove on a directory
            # raises and is swallowed below -- presumably the params.pkl file
            # was meant; confirm.
            os.remove(self.model_path)
        except Exception as e:
            pass
        self.train()
if __name__ == '__main__':
    # command-line dispatch: train / predict / retrain
    seq = seq2seq()
    if sys.argv[1] == 'train':  # training mode
        seq.train()
    elif sys.argv[1] == 'predict':  # interactive prediction mode
        seq.predict()
    elif sys.argv[1] == 'retrain':  # wipe the saved model and train from scratch
        seq.retrain()
|
{"/html2py.py": ["/seq2seq.py"]}
|
11,113
|
nicktimko/technicolor-clippy
|
refs/heads/master
|
/technicolor/format.py
|
import sys
import time
from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters import HtmlFormatter
import win32clipboard
from .html_clipboard import HtmlClipboard, put_html
def highlight_python(snippet):
    """Return *snippet* rendered as inline-styled HTML via Pygments."""
    formatter = HtmlFormatter(noclasses=True)
    return highlight(snippet, PythonLexer(), formatter)
def main():
    """Read text from the Windows clipboard and print it highlighted as HTML."""
    win32clipboard.OpenClipboard()
    raw = win32clipboard.GetClipboardData()
    html = highlight_python(raw)
    print(html)
    # put_html(html)  # writing the HTML back to the clipboard is disabled
    time.sleep(5)


if __name__ == '__main__':
    sys.exit(main())
|
{"/technicolor/format.py": ["/technicolor/html_clipboard.py"]}
|
11,114
|
nicktimko/technicolor-clippy
|
refs/heads/master
|
/technicolor/html_clipboard.py
|
from __future__ import absolute_import, print_function
import re
import win32clipboard
def has_html():
    """Return True if the clipboard currently holds an HTML fragment."""
    return HtmlClipboard().has_html_format()
def get_html():
    """Return the clipboard's HTML fragment, or None if no HTML is present."""
    cb = HtmlClipboard()
    if not cb.has_html_format():
        return None
    return cb.get_fragment()
def put_html(fragment):
    """Place *fragment* on the clipboard (convenience wrapper)."""
    HtmlClipboard().put_fragment(fragment)
class HtmlClipboard(object):
    """Encode/decode the Windows "HTML Format" (CF_HTML) clipboard payload.

    The clipboard stores HTML wrapped in a textual header whose Start*/End*
    fields are character offsets into the payload; this class builds that
    header when writing and parses it when reading.
    """

    CF_HTML = None  # lazily-registered clipboard format id

    # Header template used when writing.  %09d pads every offset to a fixed
    # 9 digits so the header's length is independent of the offset values.
    MARKER_BLOCK_OUTPUT = (
        "Version:1.0\r\n"
        "StartHTML:%09d\r\n"
        "EndHTML:%09d\r\n"
        "StartFragment:%09d\r\n"
        "EndFragment:%09d\r\n"
        "StartSelection:%09d\r\n"
        "EndSelection:%09d\r\n"
        "SourceURL:%s\r\n"
    )

    # Header pattern including the optional Start/EndSelection fields.
    # (Raw strings: the originals relied on Python passing unknown escapes
    # like \S through literally, which is deprecated.)
    MARKER_BLOCK_EX = (
        r"Version:(\S+)\s+"
        r"StartHTML:(\d+)\s+"
        r"EndHTML:(\d+)\s+"
        r"StartFragment:(\d+)\s+"
        r"EndFragment:(\d+)\s+"
        r"StartSelection:(\d+)\s+"
        r"EndSelection:(\d+)\s+"
        r"SourceURL:(\S+)"
    )
    MARKER_BLOCK_EX_RE = re.compile(MARKER_BLOCK_EX)

    # Header pattern without selection fields (some writers omit them).
    MARKER_BLOCK = (
        r"Version:(\S+)\s+"
        r"StartHTML:(\d+)\s+"
        r"EndHTML:(\d+)\s+"
        r"StartFragment:(\d+)\s+"
        r"EndFragment:(\d+)\s+"
        r"SourceURL:(\S+)"
    )
    MARKER_BLOCK_RE = re.compile(MARKER_BLOCK)

    # Minimal document wrapped around a bare fragment when none is supplied.
    DEFAULT_HTML_BODY = (
        "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0 Transitional//EN\">"
        "<HTML><HEAD></HEAD><BODY><!--StartFragment-->%s<!--EndFragment--></BODY></HTML>"
    )

    def __init__(self):
        self.html = None        # full HTML document last decoded
        self.fragment = None    # well-formed fragment within the document
        self.selection = None   # raw selected span (may be malformed HTML)
        self.source = None      # URL the HTML came from
        self.html_clipboard_version = None  # Version field of the header

    def get_cf_html(self):
        """Return the FORMATID of the "HTML Format", registering it on first use."""
        if self.CF_HTML is None:
            self.CF_HTML = win32clipboard.RegisterClipboardFormat("HTML Format")
        return self.CF_HTML

    def get_available_formats(self):
        """Return a possibly empty list of format ids currently on the clipboard."""
        formats = []
        try:
            win32clipboard.OpenClipboard(0)
            cf = win32clipboard.EnumClipboardFormats(0)
            while (cf != 0):
                formats.append(cf)
                cf = win32clipboard.EnumClipboardFormats(cf)
        finally:
            win32clipboard.CloseClipboard()
        return formats

    def has_html_format(self):
        """Return True if the clipboard holds data in HTML format."""
        return (self.get_cf_html() in self.get_available_formats())

    def get_from_clipboard(self):
        """Read the raw CF_HTML payload from the clipboard and decode it."""
        try:
            win32clipboard.OpenClipboard(0)
            src = win32clipboard.GetClipboardData(self.get_cf_html())
            # The payload arrives as UTF-16-encoded bytes.
            src = src.decode('utf-16')
            self.decode_clipboard_source(src)
        finally:
            win32clipboard.CloseClipboard()

    def decode_clipboard_source(self, src):
        """Parse a raw CF_HTML string into html/fragment/selection/source."""
        # Try the extended header first (it carries an explicit selection).
        matches = self.MARKER_BLOCK_EX_RE.match(src)
        if matches:
            self.prefix = matches.group(0)
            self.html_clipboard_version = matches.group(1)
            self.html = src[int(matches.group(2)):int(matches.group(3))]
            self.fragment = src[int(matches.group(4)):int(matches.group(5))]
            self.selection = src[int(matches.group(6)):int(matches.group(7))]
            self.source = matches.group(8)
        else:
            # Failing that, try the header without a selection.
            matches = self.MARKER_BLOCK_RE.match(src)
            if matches:
                self.prefix = matches.group(0)
                self.html_clipboard_version = matches.group(1)
                self.html = src[int(matches.group(2)):int(matches.group(3))]
                self.fragment = src[int(matches.group(4)):int(matches.group(5))]
                self.source = matches.group(6)
                self.selection = self.fragment

    def get_html(self, refresh=False):
        """Return the entire HTML document, reading the clipboard if needed."""
        if not self.html or refresh:
            self.get_from_clipboard()
        return self.html

    def get_fragment(self, refresh=False):
        """Return the well-formed HTML fragment enclosing the selected text."""
        if not self.fragment or refresh:
            self.get_from_clipboard()
        return self.fragment

    def get_selection(self, refresh=False):
        """Return the selected span of HTML (might not be well-formed)."""
        if not self.selection or refresh:
            self.get_from_clipboard()
        return self.selection

    def get_source(self, refresh=False):
        """Return the URL of the source of this HTML.

        BUG FIX: previously this tested ``self.selection`` instead of
        ``self.source``, so a cached selection could mask a missing source
        and return None without re-reading the clipboard.
        """
        if not self.source or refresh:
            self.get_from_clipboard()
        return self.source

    def put_fragment(self, fragment, selection=None, html=None, source=None):
        """Put the given well-formed *fragment* of HTML on the clipboard.

        ``selection``, if given, must be a literal substring of ``fragment``;
        ``html``, if given, must be a full document textually containing
        ``fragment`` and its markers.  Missing pieces get sensible defaults.
        """
        if selection is None:
            selection = fragment
        if html is None:
            html = self.DEFAULT_HTML_BODY % fragment
        if source is None:
            source = "file://HtmlClipboard.py"
        # Offsets are relative to the document; ValueError here means the
        # fragment/selection is not literally contained in the document.
        fragment_start = html.index(fragment)
        fragment_end = fragment_start + len(fragment)
        selection_start = html.index(selection)
        selection_end = selection_start + len(selection)
        self.put_to_clipboard(html, fragment_start, fragment_end, selection_start, selection_end, source)

    def put_to_clipboard(self, html, fragment_start, fragment_end, selection_start, selection_end, source="None"):
        """Replace the clipboard contents with the encoded HTML payload."""
        try:
            win32clipboard.OpenClipboard(0)
            win32clipboard.EmptyClipboard()
            src = self.encode_clipboard_source(html, fragment_start, fragment_end, selection_start, selection_end, source)
            win32clipboard.SetClipboardData(self.get_cf_html(), src)
        finally:
            win32clipboard.CloseClipboard()

    def encode_clipboard_source(self, html, fragment_start, fragment_end, selection_start, selection_end, source):
        """Join header and document into one CF_HTML string per the format spec."""
        # The %09d padding makes the header length value-independent, so a
        # dummy render tells us how far every offset must be shifted.
        dummy_prefix = self.MARKER_BLOCK_OUTPUT % (0, 0, 0, 0, 0, 0, source)
        len_prefix = len(dummy_prefix)
        prefix = self.MARKER_BLOCK_OUTPUT % (
            len_prefix,
            len(html) + len_prefix,
            fragment_start + len_prefix,
            fragment_end + len_prefix,
            selection_start + len_prefix,
            selection_end + len_prefix,
            source
        )
        return (prefix + html)
def dump_html():
    """Debug helper: print every piece of the clipboard's HTML state."""
    cb = HtmlClipboard()
    print("GetAvailableFormats()=%s" % str(cb.get_available_formats()))
    print("HasHtmlFormat()=%s" % str(cb.has_html_format()))
    if not cb.has_html_format():
        return
    cb.get_from_clipboard()
    for label, value in (
            ("prefix", cb.prefix),
            ("htmlClipboardVersion", cb.html_clipboard_version),
            ("GetSelection()", cb.get_selection()),
            ("GetFragment()", cb.get_fragment()),
            ("GetHtml()", cb.get_html()),
            ("GetSource()", cb.get_source()),
    ):
        print("%s=>>>%s<<<END" % (label, value))
if __name__ == '__main__':
    # Manual smoke test: round-trip a small fragment through the clipboard,
    # then dump the resulting clipboard state for inspection.
    def test_SimpleGetPutHtml():
        data = "<p>Writing to the clipboard is <strong>easy</strong> with this code.</p>"
        put_html(data)
        if get_html() == data:
            print("passed")
        else:
            print("failed")
    test_SimpleGetPutHtml()
    dump_html()
|
{"/technicolor/format.py": ["/technicolor/html_clipboard.py"]}
|
11,126
|
hkorzeniewski/Flask-activity-registration
|
refs/heads/main
|
/website/views.py
|
from flask import Blueprint, render_template, request, flash, jsonify,redirect
from flask_login import login_user, login_required, logout_user, current_user
from .models import Activity, Cardio
from . import db
import json
# Blueprint grouping all user-facing routes; registered by the app factory.
views = Blueprint('views', __name__)
@views.route('/', methods = ['GET'])
@login_required
def home():
    # Landing page for authenticated users.
    return render_template("home.html", user=current_user)
@views.route('/activities', methods=['GET','POST'])
@login_required
def activities():
    # Activities overview page; POST is accepted by the route but unused here.
    return render_template("activities.html", user=current_user)
@views.route('/cardio', methods=['GET','POST'])
@login_required
def cardio():
    """Create a cardio entry on POST; always render the cardio form."""
    if request.method == 'POST':
        form = request.form
        cardio_name = form.get('cardio_name')
        place = form.get('place')
        distance = form.get('distance')
        duration = form.get('duration')
        if len(cardio_name) < 1:
            flash('Name of activity is too short', category='error')
        else:
            entry = Cardio(cardio_name=cardio_name, place=place,
                           distance=distance, duration=duration,
                           user_id=current_user.id)
            db.session.add(entry)
            db.session.commit()
            flash('Cardio added', category='Success')
    return render_template("cardio.html", user=current_user)
@views.route('/activity', methods=['GET','POST'])
@login_required
def activity():
    """Create a general activity entry on POST; always render the form."""
    if request.method == 'POST':
        form = request.form
        activity_name = form.get('activity_name')
        duration = form.get('duration')
        description = form.get('description')
        if len(activity_name) < 1:
            flash('Name of activity is too short', category='error')
        else:
            entry = Activity(activity_name=activity_name, duration=duration,
                             description=description, user_id=current_user.id)
            db.session.add(entry)
            db.session.commit()
            flash('Activity added', category='Success')
    return render_template("activity.html", user=current_user)
@views.route('/delete-activity/<int:id>', methods=['POST'])
@login_required  # FIX: every other mutating route requires login; this one did not
def delete_activity(id):
    """Delete the activity with *id*, then return to the home page."""
    activity_delete = Activity.query.get_or_404(id)
    # NOTE(review): no ownership check — any logged-in user can delete any
    # activity; confirm whether records should be restricted to their owner.
    db.session.delete(activity_delete)
    db.session.commit()
    return redirect('/')
@views.route('/delete-cardio/<int:id>', methods=['POST'])
@login_required  # FIX: every other mutating route requires login; this one did not
def delete_cardio(id):
    """Delete the cardio entry with *id*, then return to the home page."""
    cardio_delete = Cardio.query.get_or_404(id)
    # NOTE(review): no ownership check — any logged-in user can delete any
    # cardio entry; confirm whether records should be restricted to their owner.
    db.session.delete(cardio_delete)
    db.session.commit()
    return redirect('/')
@views.route('/activity/<int:id>')
@login_required  # consistency: the template renders current_user like the other pages
def detail_activity(id):
    """Render the detail page for one activity.

    FIX: ``query.get`` returned None for unknown ids and handed the template
    a missing object; ``get_or_404`` matches the delete routes' behavior.
    """
    activity_detail = Activity.query.get_or_404(id)
    return render_template("activity-detail.html", user=current_user, activity=activity_detail)
@views.route('/cardio/<int:id>')
@login_required  # consistency: the template renders current_user like the other pages
def detail_cardio(id):
    """Render the detail page for one cardio entry.

    FIX: ``query.get`` returned None for unknown ids and handed the template
    a missing object; ``get_or_404`` matches the delete routes' behavior.
    """
    cardio_detail = Cardio.query.get_or_404(id)
    return render_template("cardio-detail.html", user=current_user, cardio=cardio_detail)
|
{"/website/views.py": ["/website/models.py"]}
|
11,127
|
hkorzeniewski/Flask-activity-registration
|
refs/heads/main
|
/migrations/versions/a6889e26a94c_.py
|
"""empty message
Revision ID: a6889e26a94c
Revises: 4aad4f7b688b
Create Date: 2021-03-05 19:41:24.581276
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'a6889e26a94c'
down_revision = '4aad4f7b688b'
branch_labels = None
depends_on = None
def upgrade():
    """Add the nullable timezone-aware ``date`` column to ``cardio``."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('cardio', sa.Column('date', sa.DateTime(timezone=True), nullable=True))
    # ### end Alembic commands ###
def downgrade():
    """Drop the ``date`` column from ``cardio`` (reverses upgrade)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('cardio', 'date')
    # ### end Alembic commands ###
|
{"/website/views.py": ["/website/models.py"]}
|
11,128
|
hkorzeniewski/Flask-activity-registration
|
refs/heads/main
|
/migrations/versions/4aad4f7b688b_.py
|
"""empty message
Revision ID: 4aad4f7b688b
Revises: c8b93fe68601
Create Date: 2021-03-05 19:29:37.355979
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '4aad4f7b688b'
down_revision = 'c8b93fe68601'
branch_labels = None
depends_on = None
def upgrade():
    """Add the nullable ``description`` text column to ``activity``."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('activity', sa.Column('description', sa.Text(), nullable=True))
    # ### end Alembic commands ###
def downgrade():
    """Drop the ``description`` column from ``activity`` (reverses upgrade)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('activity', 'description')
    # ### end Alembic commands ###
|
{"/website/views.py": ["/website/models.py"]}
|
11,129
|
hkorzeniewski/Flask-activity-registration
|
refs/heads/main
|
/secrets.py
|
# NOTE(review): session-signing key committed to version control — rotate it
# and load it from the environment instead.  Also, this module is named
# ``secrets.py``, which shadows the stdlib ``secrets`` module for any code
# importing from this directory; consider renaming the file.
SECRET_KEY = "SouthernIcyOcean"
|
{"/website/views.py": ["/website/models.py"]}
|
11,130
|
hkorzeniewski/Flask-activity-registration
|
refs/heads/main
|
/website/models.py
|
from . import db
from flask_login import UserMixin
from sqlalchemy.sql import func
class User(db.Model, UserMixin):
    # Application account; UserMixin supplies the flask-login interface.
    id = db.Column(db.Integer, primary_key = True)
    name = db.Column(db.String(150), unique=True)  # login name, must be unique
    # NOTE(review): presumably a password hash — confirm plaintext is never stored.
    password = db.Column(db.String(150))
    activities = db.relationship('Activity')  # one-to-many via Activity.user_id
    cardios = db.relationship('Cardio')  # one-to-many via Cardio.user_id
class Activity(db.Model):
    # A general (non-cardio) activity logged by a user.
    id = db.Column(db.Integer, primary_key = True)
    activity_name = db.Column(db.String(150))
    duration = db.Column(db.Float)  # NOTE(review): unit (minutes/hours) not defined here — confirm
    date = db.Column(db.DateTime(timezone=True), default=func.now())  # set at insert time
    description = db.Column(db.String(200))
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'))  # owning user
class Cardio(db.Model):
    # A cardio session (run/ride/etc.) logged by a user.
    id = db.Column(db.Integer, primary_key = True)
    cardio_name = db.Column(db.String(150))
    place = db.Column(db.String(100))
    distance = db.Column(db.Integer)  # NOTE(review): unit (km/miles) not defined here — confirm
    duration = db.Column(db.Float)
    date = db.Column(db.DateTime(timezone=True), default=func.now())  # set at insert time
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'))  # owning user
|
{"/website/views.py": ["/website/models.py"]}
|
11,133
|
petrblaho/cfme_tests
|
refs/heads/master
|
/cfme/tests/cloud/test_tag_cloud.py
|
import pytest
from cfme.web_ui import Quadicon, mixins
from cfme.configure.configuration import Category, Tag
from utils.providers import setup_a_provider
from utils.randomness import generate_lowercase_random_string, generate_random_string
@pytest.fixture(scope="module")
def setup_first_cloud_provider():
    # Ensure at least one validated cloud provider exists for this module,
    # reusing an existing one when possible.
    setup_a_provider(prov_class="cloud", validate=True, check_existing=True)
@pytest.yield_fixture(scope="module")
def category():
    """Yield a freshly created tag category; deleted again at module teardown."""
    cg = Category(name=generate_lowercase_random_string(size=8),
                  description=generate_random_string(size=32),
                  display_name=generate_random_string(size=32))
    cg.create()
    yield cg
    cg.delete()
@pytest.yield_fixture(scope="module")
def tag(category):
    """Yield a tag under the module's *category*; deleted at module teardown."""
    tag = Tag(name=generate_lowercase_random_string(size=8),
              display_name=generate_random_string(size=32),
              category=category)
    tag.create()
    yield tag
    tag.delete()
def test_tag_provider(setup_first_cloud_provider, tag):
    """Add a tag to the first visible cloud provider via the UI."""
    pytest.sel.force_navigate('clouds_providers')
    Quadicon.select_first_quad()  # operate on whichever provider is listed first
    mixins.add_tag(tag)
def test_tag_vm(setup_first_cloud_provider, tag):
    """Add a tag to the first visible cloud instance via the UI."""
    pytest.sel.force_navigate('clouds_instances_by_provider')
    Quadicon.select_first_quad()  # operate on whichever instance is listed first
    mixins.add_tag(tag)
|
{"/cfme/tests/cloud/test_tag_cloud.py": ["/utils/randomness.py"], "/cfme/tests/control/test_compliance.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_service_dialog.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_customization_template.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_namespace.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/automate/test_instance.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/configure/test_zones.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_method.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_crud.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_provisioning_dialogs.py": ["/utils/randomness.py"], "/cfme/tests/cloud/test_provisioning.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_vm_discovery.py": ["/utils/randomness.py"], "/cfme/tests/services/test_dynamicdd_dialogelement.py": ["/cfme/services/catalogs/service_catalogs.py", "/utils/randomness.py"], "/cfme/tests/services/test_catalog_item.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_report_chargeback.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_tag_infrastructure.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_system_image_type.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_report_corresponds.py": ["/utils/randomness.py"], "/cfme/tests/storage/test_storage_crud.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_class.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/configure/test_docs.py": ["/cfme/configure/about.py"], "/cfme/tests/services/test_operations.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_timeprofile.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_tag.py": ["/utils/randomness.py"], "/cfme/tests/automate/__init__.py": ["/utils/randomness.py"], 
"/cfme/tests/intelligence/reports/test_widgets.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_tag_category.py": ["/utils/randomness.py"], "/cfme/tests/cloud/test_instance_power_control.py": ["/utils/randomness.py"]}
|
11,134
|
petrblaho/cfme_tests
|
refs/heads/master
|
/cfme/tests/control/test_compliance.py
|
# -*- coding: utf-8 -*-
import re
import diaper
import pytest
from cfme.configure.configuration import VMAnalysisProfile
from cfme.control.explorer import (
Action, VMCompliancePolicy, VMControlPolicy, VMCondition, PolicyProfile)
from cfme.exceptions import VmNotFoundViaIP
from cfme.infrastructure.virtual_machines import Vm
from cfme.web_ui import flash, toolbar
from fixtures.pytest_store import store
from utils import testgen, version
from utils.appliance import Appliance, provision_appliance
from utils.log import logger
from utils.randomness import generate_random_string
from utils.update import update
from utils.wait import wait_for
# Name prefix for every VM this module provisions.
PREFIX = "test_compliance_"
pytestmark = [
    # TODO: Problems with fleecing configuration - revisit later
    pytest.mark.ignore_stream("upstream"),
    # SmartState/automate server roles are required for analysis to run.
    pytest.mark.meta(server_roles=["+automate", "+smartstate", "+smartproxy"]),
    pytest.mark.usefixtures("provider_type"),
    # This workflow is not exercised on SCVMM; skip collection there.
    pytest.mark.uncollectif(lambda provider_type: provider_type in {"scvmm"}),
]
def pytest_generate_tests(metafunc):
    # Parametrize every test in this module over infrastructure providers
    # that declare a ``vm_analysis`` section in their configuration.
    argnames, argvalues, idlist = testgen.infra_providers(
        metafunc, "vm_analysis", require_fields=True)
    testgen.parametrize(metafunc, argnames, argvalues, ids=idlist, scope="module")
def wait_for_ssa_enabled():
    # Poll (up to 10 minutes) until the "Perform SmartState Analysis" button
    # un-greys, reloading the page between attempts.
    wait_for(
        lambda: not toolbar.is_greyed('Configuration', 'Perform SmartState Analysis'),
        delay=10, handle_exception=True, num_sec=600, fail_func=lambda: toolbar.select("Reload"))
@pytest.yield_fixture(scope="module")
def compliance_vm(request, provider_key, provider_crud):
    """Yield a fleecing-configured appliance VM on the current provider.

    Reuses the appliance under test when it already lives on this provider
    (matched by the base-URL IP); otherwise provisions a fresh appliance.
    """
    try:
        # Locate the appliance under test on this provider by its IP address.
        ip_addr = re.findall(r'[0-9]+(?:\.[0-9]+){3}', store.base_url)[0]
        appl_name = provider_crud.get_mgmt_system().get_vm_name_from_ip(ip_addr)
        appliance = Appliance(provider_key, appl_name)
        logger.info(
            "The tested appliance ({}) is already on this provider ({}) so reusing it.".format(
                appl_name, provider_key))
        appliance.configure_fleecing()
        vm = Vm(appl_name, provider_crud)
    except VmNotFoundViaIP:
        # Not on this provider — provision a new appliance for the module.
        logger.info("Provisioning a new appliance on provider {}.".format(provider_key))
        appliance = provision_appliance(
            vm_name_prefix=PREFIX + "host_",
            version=str(version.current_version()),
            provider_name=provider_key)
        request.addfinalizer(lambda: diaper(appliance.destroy))  # best-effort teardown
        appliance.configure(setup_fleece=True)
        vm = Vm(appliance.vm_name, provider_crud)
    # Do the final touches
    with appliance.ipapp(browser_steal=True) as appl:
        appl.set_session_timeout(86400)  # keep the UI session alive all run
        provider_crud.refresh_provider_relationships()
        vm.wait_to_appear()
        vm.load_details()
        wait_for_ssa_enabled()
        yield vm
@pytest.yield_fixture(scope="module")
def analysis_profile(compliance_vm):
    """Yield a VM analysis profile checking installed software; auto-removed on exit."""
    rand = generate_random_string()
    ap = VMAnalysisProfile(
        name="ap-{}".format(rand), description="ap-desc-{}".format(rand), files=[],
        categories=["check_software"])
    # The context manager creates the profile on entry and deletes it on exit.
    with ap:
        yield ap
@pytest.fixture(scope="module")
def fleecing_vm(
        request, compliance_vm, vm_analysis, provider_mgmt, provider_key, provider_crud,
        analysis_profile):
    """Provision a throwaway appliance VM to be fleeced, wired to *analysis_profile*."""
    logger.info("Provisioning an appliance for fleecing on {}".format(provider_key))
    # TODO: When we get something smaller, use it!
    appliance = provision_appliance(
        vm_name_prefix=PREFIX + "for_fleece_",
        version=str(version.current_version()),
        provider_name=provider_key)
    request.addfinalizer(lambda: diaper(appliance.destroy))  # best-effort cleanup
    logger.info("Appliance {} provisioned".format(appliance.vm_name))
    vm = Vm(appliance.vm_name, provider_crud)
    provider_crud.refresh_provider_relationships()
    vm.wait_to_appear()
    # Assign the analysis profile
    # Control chain: on "VM Analysis Start", an action attaches our analysis
    # profile; the action hangs off a control policy inside a policy profile.
    action = Action(
        "Assign analysis profile {}".format(analysis_profile.name),
        "Assign Profile to Analysis Task",
        dict(analysis_profile=analysis_profile.name))
    action.create()
    request.addfinalizer(action.delete)
    policy = VMControlPolicy("Analysis profile policy {}".format(generate_random_string()))
    policy.create()
    request.addfinalizer(policy.delete)
    policy.assign_actions_to_event("VM Analysis Start", action)
    analysis_pp = PolicyProfile(
        "Analysis profile PP {}".format(generate_random_string()),
        policies=[policy])
    analysis_pp.create()
    request.addfinalizer(analysis_pp.delete)
    vm.assign_policy_profiles(analysis_pp.description)
    request.addfinalizer(lambda: vm.unassign_policy_profiles(analysis_pp.description))
    return vm
def do_scan(vm):
    """Run a SmartState scan on *vm*, blocking until "Last Analyzed" changes."""
    if vm.rediscover_if_analysis_data_present():
        # policy profile assignment is lost so reassign
        vm.assign_policy_profiles(*vm._assigned_pp)
    def _scan():
        # Current "Last Analyzed" value; changing means the scan finished.
        return vm.get_detail(properties=("Lifecycle", "Last Analyzed")).lower()
    original = _scan()
    vm.smartstate_scan(cancel=False, from_details=True)
    flash.assert_message_contain("Smart State Analysis initiated")
    logger.info("Scan initiated")
    # Poll up to 10 minutes, reloading the page between attempts.
    wait_for(
        lambda: _scan() != original,
        num_sec=600, delay=5, fail_func=lambda: toolbar.select("Reload"))
    logger.info("Scan finished")
def test_check_package_presence(request, fleecing_vm, ssh_client, vm_analysis, analysis_profile):
    """This test checks compliance by presence of a certain cfme-appliance package which is expected
    to be present on an appliance."""
    # TODO: If we step out from provisioning a full appliance for fleecing, this might need revisit
    # Condition: exactly one installed application whose name starts with
    # "cfme-appliance".
    condition = VMCondition(
        "Compliance testing condition {}".format(generate_random_string(size=8)),
        expression=("fill_find(field=VM and Instance.Guest Applications : Name, "
            "skey=STARTS WITH, value=cfme-appliance, check=Check Count, ckey= = , cvalue=1)")
    )
    request.addfinalizer(lambda: diaper(condition.delete))  # best-effort cleanup
    policy = VMCompliancePolicy("Compliance {}".format(generate_random_string(size=8)))
    request.addfinalizer(lambda: diaper(policy.delete))
    policy.create()
    policy.assign_conditions(condition)
    profile = PolicyProfile(
        "Compliance PP {}".format(generate_random_string(size=8)),
        policies=[policy]
    )
    request.addfinalizer(lambda: diaper(profile.delete))
    profile.create()
    fleecing_vm.assign_policy_profiles(profile.description)
    request.addfinalizer(lambda: fleecing_vm.unassign_policy_profiles(profile.description))
    # Widen the analysis scope so software data is collected by the scan.
    with update(analysis_profile):
        analysis_profile.categories = [
            "check_services", "check_accounts", "check_software", "check_vmconfig", "check_system"]
    do_scan(fleecing_vm)
    assert fleecing_vm.check_compliance_and_wait()
##
# File presence fleecing
@pytest.fixture(scope="function")
def check_file_name():
    # Fresh per-test path under /root so repeated runs never collide.
    return "/root/{}".format(generate_random_string())
def test_check_files(request, fleecing_vm, ssh_client, check_file_name, analysis_profile):
    """This test checks presence and contents of a certain file. First the non-compliance is
    enforced by not having the file, then the compliance is checked against existing file and
    it is expected to be compliant.
    """
    contents = generate_random_string(size=12)
    # Condition: a file at *check_file_name* exists and contains *contents*.
    condition = VMCondition(
        "Compliance testing condition {}".format(generate_random_string(size=8)),
        expression=("fill_find(VM and Instance.Files : Name, "
            "=, {}, Check Any, Contents, INCLUDES, {})".format(check_file_name, contents))
    )
    request.addfinalizer(lambda: diaper(condition.delete))  # best-effort cleanup
    policy = VMCompliancePolicy("Compliance {}".format(generate_random_string(size=8)))
    request.addfinalizer(lambda: diaper(policy.delete))
    policy.create()
    policy.assign_conditions(condition)
    profile = PolicyProfile(
        "Compliance PP {}".format(generate_random_string(size=8)),
        policies=[policy]
    )
    request.addfinalizer(lambda: diaper(profile.delete))
    profile.create()
    fleecing_vm.assign_policy_profiles(profile.description)
    request.addfinalizer(lambda: fleecing_vm.unassign_policy_profiles(profile.description))
    # Collect file contents under /root plus the standard categories.
    with update(analysis_profile):
        analysis_profile.files = [("/root/*", True)]
        analysis_profile.categories = [
            "check_services", "check_accounts", "check_software", "check_vmconfig", "check_system"]
    # Non-compliant
    ssh_client.run_command("rm -f {}".format(check_file_name))
    do_scan(fleecing_vm)
    assert not fleecing_vm.check_compliance_and_wait()
    # Compliant
    ssh_client.run_command("echo {} > {}".format(contents, check_file_name))
    do_scan(fleecing_vm)
    assert fleecing_vm.check_compliance_and_wait()
|
{"/cfme/tests/cloud/test_tag_cloud.py": ["/utils/randomness.py"], "/cfme/tests/control/test_compliance.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_service_dialog.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_customization_template.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_namespace.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/automate/test_instance.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/configure/test_zones.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_method.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_crud.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_provisioning_dialogs.py": ["/utils/randomness.py"], "/cfme/tests/cloud/test_provisioning.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_vm_discovery.py": ["/utils/randomness.py"], "/cfme/tests/services/test_dynamicdd_dialogelement.py": ["/cfme/services/catalogs/service_catalogs.py", "/utils/randomness.py"], "/cfme/tests/services/test_catalog_item.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_report_chargeback.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_tag_infrastructure.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_system_image_type.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_report_corresponds.py": ["/utils/randomness.py"], "/cfme/tests/storage/test_storage_crud.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_class.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/configure/test_docs.py": ["/cfme/configure/about.py"], "/cfme/tests/services/test_operations.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_timeprofile.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_tag.py": ["/utils/randomness.py"], "/cfme/tests/automate/__init__.py": ["/utils/randomness.py"], 
"/cfme/tests/intelligence/reports/test_widgets.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_tag_category.py": ["/utils/randomness.py"], "/cfme/tests/cloud/test_instance_power_control.py": ["/utils/randomness.py"]}
|
11,135
|
petrblaho/cfme_tests
|
refs/heads/master
|
/conftest.py
|
"""
Top-level conftest.py does a couple of things:
1) Add cfme_pages repo to the sys.path automatically
2) Load a number of plugins and fixtures automatically
"""
from pkgutil import iter_modules
import pytest
import requests
import cfme.fixtures
import fixtures
import markers
import metaplugins
from cfme.fixtures.rdb import Rdb
from fixtures.pytest_store import store
from utils.log import logger
from utils.path import data_path
from utils.net import net_check
from utils.ssh import SSHClient
from utils.version import current_version
from utils.wait import TimedOutError
class _AppliancePoliceException(Exception):
def __init__(self, message, port, *args, **kwargs):
super(_AppliancePoliceException, self).__init__(message, port, *args, **kwargs)
self.message = message
self.port = port
@pytest.mark.hookwrapper
def pytest_addoption(parser):
    # Create the cfme option group for use in other plugins
    parser.getgroup('cfme', 'cfme: options related to cfme/miq appliances')
    # Hookwrapper: yielding lets the remaining pytest_addoption hooks run.
    yield
@pytest.fixture(scope="session", autouse=True)
def set_session_timeout():
    # Stretch the appliance UI session timeout to 24h for the whole test run.
    store.current_appliance.set_session_timeout(86400)
@pytest.fixture(scope="session", autouse=True)
def set_default_domain():
    """Ensure the automate 'Default' domain is enabled (5.3+ only)."""
    if current_version() < "5.3":
        return  # Domains are not in 5.2.x and lower
    ssh_client = SSHClient()
    # The command ignores the case when the Default domain is not present (: true)
    result = ssh_client.run_rails_command(
        "\"d = MiqAeDomain.where :name => 'Default'; puts (d) ? d.first.enabled : true\"")
    if result.output.lower().strip() != "true":
        # Re-enable the domain
        ssh_client.run_rails_command(
            "\"d = MiqAeDomain.where :name => 'Default'; d = d.first; d.enabled = true; d.save!\"")
@pytest.fixture(scope="session", autouse=True)
def fix_merkyl_workaround():
    """Workaround around merkyl not opening an iptables port for communication"""
    ssh_client = SSHClient()
    # Only patch appliances that actually ship the merkyl init script.
    if ssh_client.run_command('test -f /etc/init.d/merkyl').rc == 0:
        logger.info('Rudely overwriting merkyl init.d on appliance;')
        local_file = data_path.join("bundles").join("merkyl").join("merkyl")
        remote_file = "/etc/init.d/merkyl"
        ssh_client.put_file(local_file.strpath, remote_file)
        ssh_client.run_command("service merkyl restart")
@pytest.fixture(autouse=True, scope="function")
def appliance_police():
    """Before every test, verify the appliance is reachable; try to recover it.

    Probes ssh/https/postgres ports and the web UI; on a 443 failure waits
    for (or force-restarts) the UI.  If recovery fails, pages a human via
    the slave manager and drops into a remote debugger.
    """
    if not store.slave_manager:
        # Only meaningful under parallelized runs with a slave manager.
        return
    try:
        ports = {'ssh': 22, 'https': 443, 'postgres': 5432}
        port_results = {pn: net_check(pp, force=True) for pn, pp in ports.items()}
        for port, result in port_results.items():
            if not result:
                raise _AppliancePoliceException('Port {} was not contactable'.format(port), port)
        status_code = requests.get(store.current_appliance.url, verify=False,
                                   timeout=60).status_code
        if status_code != 200:
            raise _AppliancePoliceException('Status code was {}, should be 200'.format(
                status_code), port)
        return
    except _AppliancePoliceException as e:
        # special handling for known failure conditions
        if e.port == 443:
            # if the web ui worker merely crashed, give it 15 minutes
            # to come back up
            try:
                store.current_appliance.wait_for_web_ui(900)
                return
            except TimedOutError:
                # the UI didn't come back up after 15 minutes, and is
                # probably frozen; kill it and restart
                # fortunately we already check SSH is working...
                store.current_appliance.restart_evm_service(900, rude=True)
                # take another shot at letting the web UI come up
                try:
                    store.current_appliance.wait_for_web_ui(900)
                    return
                except TimedOutError:
                    # so much for that
                    pass
        e_message = e.message
    except Exception as e:
        e_message = e.args[0]
    # Regardles of the exception raised, we didn't return anywhere above
    # time to call a human
    msg = 'Help! My appliance {} crashed with: {}'.format(store.current_appliance.url, e_message)
    store.slave_manager.message(msg)
    # Blocks in a remote debugger session and emails the listed recipients.
    Rdb(msg).set_trace(**{
        'subject': 'RDB Breakpoint: Appliance failure',
        'recipients': ['semyers@redhat.com', 'psavage@redhat.com'],
    })
    store.slave_manager.message('Resuming testing following remote debugging')
def _pytest_plugins_generator(*extension_pkgs):
# Finds all submodules in pytest extension packages and loads them
for extension_pkg in extension_pkgs:
path = extension_pkg.__path__
prefix = '%s.' % extension_pkg.__name__
for importer, modname, is_package in iter_modules(path, prefix):
yield modname
pytest_plugins = tuple(_pytest_plugins_generator(fixtures, markers, cfme.fixtures, metaplugins))
collect_ignore = ["tests/scenarios"]
|
{"/cfme/tests/cloud/test_tag_cloud.py": ["/utils/randomness.py"], "/cfme/tests/control/test_compliance.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_service_dialog.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_customization_template.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_namespace.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/automate/test_instance.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/configure/test_zones.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_method.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_crud.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_provisioning_dialogs.py": ["/utils/randomness.py"], "/cfme/tests/cloud/test_provisioning.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_vm_discovery.py": ["/utils/randomness.py"], "/cfme/tests/services/test_dynamicdd_dialogelement.py": ["/cfme/services/catalogs/service_catalogs.py", "/utils/randomness.py"], "/cfme/tests/services/test_catalog_item.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_report_chargeback.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_tag_infrastructure.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_system_image_type.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_report_corresponds.py": ["/utils/randomness.py"], "/cfme/tests/storage/test_storage_crud.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_class.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/configure/test_docs.py": ["/cfme/configure/about.py"], "/cfme/tests/services/test_operations.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_timeprofile.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_tag.py": ["/utils/randomness.py"], "/cfme/tests/automate/__init__.py": ["/utils/randomness.py"], 
"/cfme/tests/intelligence/reports/test_widgets.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_tag_category.py": ["/utils/randomness.py"], "/cfme/tests/cloud/test_instance_power_control.py": ["/utils/randomness.py"]}
|
11,136
|
petrblaho/cfme_tests
|
refs/heads/master
|
/scripts/template_tester.py
|
#!/usr/bin/env python
"""Template tester script, used to test and mark template as usable.
get:
Export bash vars to be eval'd for template testing with the jenkins runner
latest:
Export bash vars to be eval'd for getting the latest usable template
mark:
Mark a template as tested and, if it passes, usable.
"""
import os
import sys
from utils import trackerbot
from utils.log import logger
def get(api):
try:
template, provider_key, stream = trackerbot.templates_to_test(api, limit=1)[0]
except (IndexError, TypeError):
# No untested providertemplates, all is well
return 0
# Print envvar exports to be eval'd
export(
appliance_template=template,
provider_key=provider_key,
stream=stream
)
def latest(api, stream, provider_key=None):
try:
if provider_key:
prov = api.provider(provider_key).get()
res = prov['latest_templates'][stream]
else:
res = api.group(stream).get()
except IndexError:
# No templates in stream
return 1
export(
appliance_template=res['latest_template'],
provider_keys=' '.join(res['latest_template_providers'])
)
def export(**env_vars):
for varname, value in env_vars.items():
print 'export %s="%s";' % (varname, value)
print "# to import these into your bash environment: eval $(%s)" % ' '.join(sys.argv)
def mark(api, provider_key, template, usable, diagnose):
# set some defaults
diagnosis = None
build_number = None
if not usable:
build_number = os.environ.get('BUILD_NUMBER', None)
if diagnose:
# diagnose will return None on a usable appliance, so don't bother
from utils.appliance import IPAppliance
ipa = IPAppliance()
diagnosis = ipa.diagnose_evm_failure()
if diagnosis:
logger.error('Appliance failed: {}'.format(diagnosis.split(os.linesep)[0]))
trackerbot.mark_provider_template(api, provider_key, template, tested=True, usable=usable,
diagnosis=diagnosis, build_number=build_number)
def retest(api, provider_key, template):
trackerbot.mark_provider_template(api, provider_key, template, tested=False)
if __name__ == '__main__':
parser = trackerbot.cmdline_parser()
subs = parser.add_subparsers(title='commands', dest='command')
parse_get = subs.add_parser('get', help='get a template to test')
parse_get.set_defaults(func=get)
parse_latest = subs.add_parser('latest', help='get the latest usable template for a provider')
parse_latest.set_defaults(func=latest)
parse_latest.add_argument('stream', help='template stream (e.g. upstream, downstream-52z')
parse_latest.add_argument('provider_key', nargs='?', default=None)
parse_mark = subs.add_parser('mark', help='mark a tested template')
parse_mark.set_defaults(func=mark)
parse_mark.add_argument('provider_key')
parse_mark.add_argument('template')
parse_mark.add_argument('-n', '--not-usable', dest='usable', action='store_false',
default=True, help='mark template as not usable (templates are marked usable by default')
parse_mark.add_argument('-d', '--diagnose', dest='diagnose', action='store_true',
default=False, help='attempt to diagnose an unusable template and submit the result')
parse_retest = subs.add_parser('retest', help='flag a tested template for retesting')
parse_retest.set_defaults(func=retest)
parse_retest.add_argument('provider_key')
parse_retest.add_argument('template')
args = parser.parse_args()
api = trackerbot.api(args.trackerbot_url)
func_map = {
get: lambda: get(api),
latest: lambda: latest(api, args.stream, args.provider_key),
mark: lambda: mark(api, args.provider_key, args.template, args.usable, args.diagnose),
retest: lambda: retest(api, args.provider_key, args.template),
}
sys.exit(func_map[args.func]())
|
{"/cfme/tests/cloud/test_tag_cloud.py": ["/utils/randomness.py"], "/cfme/tests/control/test_compliance.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_service_dialog.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_customization_template.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_namespace.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/automate/test_instance.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/configure/test_zones.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_method.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_crud.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_provisioning_dialogs.py": ["/utils/randomness.py"], "/cfme/tests/cloud/test_provisioning.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_vm_discovery.py": ["/utils/randomness.py"], "/cfme/tests/services/test_dynamicdd_dialogelement.py": ["/cfme/services/catalogs/service_catalogs.py", "/utils/randomness.py"], "/cfme/tests/services/test_catalog_item.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_report_chargeback.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_tag_infrastructure.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_system_image_type.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_report_corresponds.py": ["/utils/randomness.py"], "/cfme/tests/storage/test_storage_crud.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_class.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/configure/test_docs.py": ["/cfme/configure/about.py"], "/cfme/tests/services/test_operations.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_timeprofile.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_tag.py": ["/utils/randomness.py"], "/cfme/tests/automate/__init__.py": ["/utils/randomness.py"], 
"/cfme/tests/intelligence/reports/test_widgets.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_tag_category.py": ["/utils/randomness.py"], "/cfme/tests/cloud/test_instance_power_control.py": ["/utils/randomness.py"]}
|
11,137
|
petrblaho/cfme_tests
|
refs/heads/master
|
/cfme/tests/cloud/test_tenant.py
|
import pytest
from cfme.cloud.tenant import Tenant
from utils import testgen
from utils.randomness import generate_random_string
pytest_generate_tests = testgen.generate(testgen.provider_by_type, ['openstack'],
scope='module')
@pytest.fixture
def tenant(provider_key):
return Tenant(name=generate_random_string(size=8),
description=generate_random_string(size=8),
provider_key=provider_key)
def test_tenant(provider_mgmt, tenant, provider_key):
""" Tests tenant (currently disabled)
Metadata:
test_flag: tenant
"""
print tenant.name, tenant.description, provider_key
|
{"/cfme/tests/cloud/test_tag_cloud.py": ["/utils/randomness.py"], "/cfme/tests/control/test_compliance.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_service_dialog.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_customization_template.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_namespace.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/automate/test_instance.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/configure/test_zones.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_method.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_crud.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_provisioning_dialogs.py": ["/utils/randomness.py"], "/cfme/tests/cloud/test_provisioning.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_vm_discovery.py": ["/utils/randomness.py"], "/cfme/tests/services/test_dynamicdd_dialogelement.py": ["/cfme/services/catalogs/service_catalogs.py", "/utils/randomness.py"], "/cfme/tests/services/test_catalog_item.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_report_chargeback.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_tag_infrastructure.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_system_image_type.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_report_corresponds.py": ["/utils/randomness.py"], "/cfme/tests/storage/test_storage_crud.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_class.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/configure/test_docs.py": ["/cfme/configure/about.py"], "/cfme/tests/services/test_operations.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_timeprofile.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_tag.py": ["/utils/randomness.py"], "/cfme/tests/automate/__init__.py": ["/utils/randomness.py"], 
"/cfme/tests/intelligence/reports/test_widgets.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_tag_category.py": ["/utils/randomness.py"], "/cfme/tests/cloud/test_instance_power_control.py": ["/utils/randomness.py"]}
|
11,138
|
petrblaho/cfme_tests
|
refs/heads/master
|
/sprout/sprout/__init__.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from contextlib import contextmanager
try:
import cPickle as pickle
except ImportError:
import pickle
from .celery import app as celery_app
assert celery_app
from django.core.cache import cache
from sprout import settings
from redis import StrictRedis
from utils.wait import wait_for
redis_client = StrictRedis(**settings.GENERAL_REDIS)
CRITICAL_SECTION_LOCK_TIME = 60
@contextmanager
def critical_section(name):
wait_for(
cache.add,
["lock-{}".format(name), 'true', CRITICAL_SECTION_LOCK_TIME],
delay=0.3, num_sec=2 * CRITICAL_SECTION_LOCK_TIME)
try:
yield
finally:
cache.delete("lock-{}".format(name))
class RedisWrapper(object):
LOCK_EXPIRE = 60
def __init__(self, client):
self.client = client
def _set(self, key, value, *args, **kwargs):
return self.client.set(str(key), pickle.dumps(value), *args, **kwargs)
def _get(self, key, *args, **kwargs):
default = kwargs.pop("default", None)
result = self.client.get(str(key), *args, **kwargs)
if result is None:
return default
return pickle.loads(result)
@contextmanager
def atomic(self):
wait_for(
cache.add,
["redis-atomic", 'true', self.LOCK_EXPIRE],
delay=0.3, num_sec=2 * self.LOCK_EXPIRE)
try:
yield self
finally:
cache.delete("redis-atomic")
def set(self, key, value, *args, **kwargs):
with self.atomic():
return self._set(key, value, *args, **kwargs)
def get(self, key, *args, **kwargs):
with self.atomic():
return self._get(key, *args, **kwargs)
def delete(self, key, *args, **kwargs):
with self.atomic():
return self.client.delete(key, *args, **kwargs)
@contextmanager
def appliances_ignored_when_renaming(self, *appliances):
with self.atomic() as client:
ignored_appliances = client._get("renaming_appliances")
if ignored_appliances is None:
ignored_appliances = set([])
for appliance in appliances:
ignored_appliances.add(appliance)
client._set("renaming_appliances", ignored_appliances)
yield
with self.atomic() as client:
ignored_appliances = client._get("renaming_appliances")
if ignored_appliances is None:
ignored_appliances = set([])
for appliance in appliances:
try:
ignored_appliances.remove(appliance)
except KeyError:
# Something worng happened, ignore
pass
client._set("renaming_appliances", ignored_appliances)
@property
def renaming_appliances(self):
return self.get("renaming_appliances") or set([])
redis = RedisWrapper(redis_client)
|
{"/cfme/tests/cloud/test_tag_cloud.py": ["/utils/randomness.py"], "/cfme/tests/control/test_compliance.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_service_dialog.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_customization_template.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_namespace.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/automate/test_instance.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/configure/test_zones.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_method.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_crud.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_provisioning_dialogs.py": ["/utils/randomness.py"], "/cfme/tests/cloud/test_provisioning.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_vm_discovery.py": ["/utils/randomness.py"], "/cfme/tests/services/test_dynamicdd_dialogelement.py": ["/cfme/services/catalogs/service_catalogs.py", "/utils/randomness.py"], "/cfme/tests/services/test_catalog_item.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_report_chargeback.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_tag_infrastructure.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_system_image_type.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_report_corresponds.py": ["/utils/randomness.py"], "/cfme/tests/storage/test_storage_crud.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_class.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/configure/test_docs.py": ["/cfme/configure/about.py"], "/cfme/tests/services/test_operations.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_timeprofile.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_tag.py": ["/utils/randomness.py"], "/cfme/tests/automate/__init__.py": ["/utils/randomness.py"], 
"/cfme/tests/intelligence/reports/test_widgets.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_tag_category.py": ["/utils/randomness.py"], "/cfme/tests/cloud/test_instance_power_control.py": ["/utils/randomness.py"]}
|
11,139
|
petrblaho/cfme_tests
|
refs/heads/master
|
/cfme/tests/automate/test_service_dialog.py
|
import pytest
import utils.randomness as rand
from utils.update import update
from utils import error, version
from cfme.automate.service_dialogs import ServiceDialog
pytestmark = [pytest.mark.usefixtures("logged_in")]
def test_create_service_dialog():
element_data = {
'ele_label': "ele_" + rand.generate_random_string(),
'ele_name': rand.generate_random_string(),
'ele_desc': rand.generate_random_string(),
'choose_type': "Text Box",
'default_text_box': "Default text"
}
dialog = ServiceDialog(label=rand.generate_random_string(),
description="my dialog", submit=True, cancel=True,
tab_label="tab_" + rand.generate_random_string(),
tab_desc="my tab desc",
box_label="box_" + rand.generate_random_string(),
box_desc="my box desc")
dialog.create(element_data)
def test_update_service_dialog():
element_data = {
'ele_label': "ele_" + rand.generate_random_string(),
'ele_name': rand.generate_random_string(),
'ele_desc': rand.generate_random_string(),
'choose_type': "Text Box",
'default_text_box': "Default text"
}
dialog = ServiceDialog(label=rand.generate_random_string(),
description="my dialog", submit=True, cancel=True,
tab_label="tab_" + rand.generate_random_string(),
tab_desc="my tab desc",
box_label="box_" + rand.generate_random_string(),
box_desc="my box desc")
dialog.create(element_data)
with update(dialog):
dialog.description = "my edited description"
def test_delete_service_dialog():
element_data = {
'ele_label': "ele_" + rand.generate_random_string(),
'ele_name': rand.generate_random_string(),
'ele_desc': rand.generate_random_string(),
'choose_type': "Text Box",
'default_text_box': "Default text"
}
dialog = ServiceDialog(label=rand.generate_random_string(),
description="my dialog", submit=True, cancel=True,
tab_label="tab_" + rand.generate_random_string(),
tab_desc="my tab desc",
box_label="box_" + rand.generate_random_string(),
box_desc="my box desc")
dialog.create(element_data)
dialog.delete()
def test_service_dialog_duplicate_name():
element_data = {
'ele_label': "ele_" + rand.generate_random_string(),
'ele_name': rand.generate_random_string(),
'ele_desc': rand.generate_random_string(),
'choose_type': "Text Box",
'default_text_box': "Default text"
}
dialog = ServiceDialog(label=rand.generate_random_string(),
description="my dialog", submit=True, cancel=True,
tab_label="tab_" + rand.generate_random_string(),
tab_desc="my tab desc",
box_label="box_" + rand.generate_random_string(),
box_desc="my box desc")
dialog.create(element_data)
error_msg = version.pick({
version.LOWEST: "Dialog Label has already been taken",
'5.3': "Label has already been taken"
})
with error.expected(error_msg):
dialog.create(element_data)
def test_checkbox_dialog_element():
element_data = {
'ele_label': "ele_" + rand.generate_random_string(),
'ele_name': rand.generate_random_string(),
'ele_desc': rand.generate_random_string(),
'choose_type': "Check Box",
'default_text_box': True,
'field_required': True
}
dialog = ServiceDialog(label=rand.generate_random_string(),
description="my dialog", submit=True, cancel=True,
tab_label="tab_" + rand.generate_random_string(),
tab_desc="my tab desc",
box_label="box_" + rand.generate_random_string(),
box_desc="my box desc")
dialog.create(element_data)
def test_datecontrol_dialog_element():
element_data = {
'ele_label': "ele_" + rand.generate_random_string(),
'ele_name': rand.generate_random_string(),
'ele_desc': rand.generate_random_string(),
'choose_type': "Date Control",
'field_past_dates': True
}
dialog = ServiceDialog(label=rand.generate_random_string(),
description="my dialog", submit=True, cancel=True,
tab_label="tab_" + rand.generate_random_string(),
tab_desc="my tab desc",
box_label="box_" + rand.generate_random_string(),
box_desc="my box desc")
dialog.create(element_data)
def test_dropdownlist_dialog_element():
element_data = {
'ele_label': "ele_" + rand.generate_random_string(),
'ele_name': rand.generate_random_string(),
'ele_desc': rand.generate_random_string(),
'choose_type': "Drop Down List"
}
dialog = ServiceDialog(label=rand.generate_random_string(),
description="my dialog", submit=True, cancel=True,
tab_label="tab_" + rand.generate_random_string(),
tab_desc="my tab desc",
box_label="box_" + rand.generate_random_string(),
box_desc="my box desc")
dialog.create(element_data)
def test_radiobutton_dialog_element():
element_data = {
'ele_label': "ele_" + rand.generate_random_string(),
'ele_name': rand.generate_random_string(),
'ele_desc': rand.generate_random_string(),
'choose_type': "Radio Button"
}
dialog = ServiceDialog(label=rand.generate_random_string(),
description="my dialog", submit=True, cancel=True,
tab_label="tab_" + rand.generate_random_string(),
tab_desc="my tab desc",
box_label="box_" + rand.generate_random_string(),
box_desc="my box desc")
dialog.create(element_data)
def test_tagcontrol_dialog_element():
element_data = {
'ele_label': "ele_" + rand.generate_random_string(),
'ele_name': rand.generate_random_string(),
'ele_desc': rand.generate_random_string(),
'choose_type': "Tag Control",
'field_category': "Service Level",
'field_required': True
}
dialog = ServiceDialog(label=rand.generate_random_string(),
description="my dialog", submit=True, cancel=True,
tab_label="tab_" + rand.generate_random_string(),
tab_desc="my tab desc",
box_label="box_" + rand.generate_random_string(),
box_desc="my box desc")
dialog.create(element_data)
def test_textareabox_dialog_element():
element_data = {
'ele_label': "ele_" + rand.generate_random_string(),
'ele_name': rand.generate_random_string(),
'ele_desc': rand.generate_random_string(),
'choose_type': "Text Area Box",
'field_required': True
}
dialog = ServiceDialog(label=rand.generate_random_string(),
description="my dialog", submit=True, cancel=True,
tab_label="tab_" + rand.generate_random_string(),
tab_desc="my tab desc",
box_label="box_" + rand.generate_random_string(),
box_desc="my box desc")
dialog.create(element_data)
def test_reorder_elements():
element_1_data = {
'ele_label': "ele_" + rand.generate_random_string(),
'ele_name': rand.generate_random_string(),
'ele_desc': rand.generate_random_string(),
'choose_type': "Text Box",
'default_text_box': "Default text"
}
element_2_data = {
'ele_label': "ele_" + rand.generate_random_string(),
'ele_name': rand.generate_random_string(),
'ele_desc': rand.generate_random_string(),
'choose_type': "Check Box",
'default_text_box': True,
'field_required': True
}
dialog = ServiceDialog(label=rand.generate_random_string(),
description="my dialog", submit=True, cancel=True,
tab_label="tab_" + rand.generate_random_string(),
tab_desc="my tab desc",
box_label="box_" + rand.generate_random_string(),
box_desc="my box desc")
dialog.create(element_1_data, element_2_data)
dialog.reorder_elements(dialog.box_label, element_1_data, element_2_data)
|
{"/cfme/tests/cloud/test_tag_cloud.py": ["/utils/randomness.py"], "/cfme/tests/control/test_compliance.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_service_dialog.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_customization_template.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_namespace.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/automate/test_instance.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/configure/test_zones.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_method.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_crud.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_provisioning_dialogs.py": ["/utils/randomness.py"], "/cfme/tests/cloud/test_provisioning.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_vm_discovery.py": ["/utils/randomness.py"], "/cfme/tests/services/test_dynamicdd_dialogelement.py": ["/cfme/services/catalogs/service_catalogs.py", "/utils/randomness.py"], "/cfme/tests/services/test_catalog_item.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_report_chargeback.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_tag_infrastructure.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_system_image_type.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_report_corresponds.py": ["/utils/randomness.py"], "/cfme/tests/storage/test_storage_crud.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_class.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/configure/test_docs.py": ["/cfme/configure/about.py"], "/cfme/tests/services/test_operations.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_timeprofile.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_tag.py": ["/utils/randomness.py"], "/cfme/tests/automate/__init__.py": ["/utils/randomness.py"], 
"/cfme/tests/intelligence/reports/test_widgets.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_tag_category.py": ["/utils/randomness.py"], "/cfme/tests/cloud/test_instance_power_control.py": ["/utils/randomness.py"]}
|
11,140
|
petrblaho/cfme_tests
|
refs/heads/master
|
/cfme/services/catalogs/myservice.py
|
from functools import partial
from cfme import web_ui as ui
from cfme.fixtures import pytest_selenium as sel
from cfme.web_ui import accordion, menu, flash, Quadicon, Region, Form, fill, form_buttons
from cfme.web_ui import toolbar as tb
from utils.update import Updateable
from utils.wait import wait_for
lifecycle_btn = partial(tb.select, "Lifecycle")
reload_func = partial(tb.select, "Reload current display")
my_service_tree = partial(accordion.tree, "Services")
details_page = Region(infoblock_type='detail')
cfg_btn = partial(tb.select, "Configuration")
policy_btn = partial(tb.select, "Policy")
retirement_form = Form(
fields=[
('retirement_date', ui.Calendar('miq_date_1')),
('retirement_warning', ui.Select("select#retirement_warn"))
])
edit_service_form = Form(
fields=[
("name", ui.Input("name")),
("description", ui.Input("description"))
])
set_ownership_form = Form(
fields=[
("select_owner", ui.Select("select#user_name")),
("select_group", ui.Select("select#group_name"))
])
edit_tags_form = Form(
fields=[
("select_value", ui.Select("select#tag_add"))
])
menu.nav.add_branch(
'my_services',
{
'service':
[
lambda ctx: my_service_tree('All Services', ctx['service_name']),
{
'retire_service_on_date': menu.nav.partial(lifecycle_btn, "Set Retirement Date"),
'edit_service': menu.nav.partial(cfg_btn, "Edit this Service"),
'service_set_ownership': menu.nav.partial(cfg_btn, "Set Ownership"),
'service_edit_tags': menu.nav.partial(policy_btn, "Edit Tags")
}
]
}
)
class MyService(Updateable):
"""Create,Edit and Delete Button Groups
Args:
service_name: The name of service to retire.
vm_name: Name of vm in the service.
retirement_date: Date to retire service.
"""
def __init__(self, service_name, vm_name):
self.service_name = service_name
self.vm_name = vm_name
def get_detail(self, properties=None):
""" Gets details from the details infoblock
Args:
*ident: An InfoBlock title, followed by the Key name
e.g. "Relationships", "Images"
Returns: A string representing the contents of the InfoBlock's value.
"""
return details_page.infoblock.text(*properties)
def retire(self):
sel.force_navigate('service',
context={'service_name': self.service_name})
lifecycle_btn("Retire this Service", invokes_alert=True)
sel.handle_alert()
flash.assert_success_message('Retirement initiated for 1 Service from the CFME Database')
wait_time_min = 1
quadicon = Quadicon(self.vm_name, "vm")
sel.click(quadicon)
detail_t = ("Power Management", "Power State")
wait_for(
lambda: self.get_detail(properties=detail_t) == "off",
fail_func=reload_func,
num_sec=wait_time_min * 120,
message="wait for service to retire"
)
assert(self.get_detail(properties=detail_t) == "off")
def retire_on_date(self, retirement_date):
sel.force_navigate('retire_service_on_date',
context={'service_name': self.service_name})
fill(retirement_form, {'retirement_date': retirement_date},
action=form_buttons.save)
wait_time_min = 1
quadicon = Quadicon(self.vm_name, "vm")
sel.click(quadicon)
detail_t = ("Power Management", "Power State")
wait_for(
lambda: self.get_detail(properties=detail_t) == "off",
fail_func=reload_func,
num_sec=wait_time_min * 120,
message="wait for service to retire"
)
assert(self.get_detail(properties=detail_t) == "off")
def update(self, name, description):
sel.force_navigate('edit_service',
context={'service_name': self.service_name})
edited_name = self.service_name + "_" + name
fill(edit_service_form, {'name': edited_name,
'description': description},
action=form_buttons.save)
flash.assert_success_message('Service "{}" was saved'.format(edited_name))
def delete(self, name):
sel.force_navigate('service',
context={'service_name': name})
cfg_btn("Remove Service from the VMDB", invokes_alert=True)
sel.handle_alert()
flash.assert_success_message('Service "{}": Delete successful'.format(name))
def set_ownership(self, owner, group):
sel.force_navigate('service_set_ownership',
context={'service_name': self.service_name})
fill(set_ownership_form, {'select_owner': owner,
'select_group': group},
action=form_buttons.save)
flash.assert_success_message('Ownership saved for selected Service')
def edit_tags(self, value):
sel.force_navigate('service_edit_tags',
context={'service_name': self.service_name})
fill(edit_tags_form, {'select_value': value},
action=form_buttons.save)
flash.assert_success_message('Tag edits were successfully saved')
def check_vm_add(self, add_vm_name):
sel.force_navigate('service',
context={'service_name': self.service_name})
quadicon = Quadicon(add_vm_name, "vm")
sel.click(quadicon)
flash.assert_no_errors()
|
{"/cfme/tests/cloud/test_tag_cloud.py": ["/utils/randomness.py"], "/cfme/tests/control/test_compliance.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_service_dialog.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_customization_template.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_namespace.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/automate/test_instance.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/configure/test_zones.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_method.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_crud.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_provisioning_dialogs.py": ["/utils/randomness.py"], "/cfme/tests/cloud/test_provisioning.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_vm_discovery.py": ["/utils/randomness.py"], "/cfme/tests/services/test_dynamicdd_dialogelement.py": ["/cfme/services/catalogs/service_catalogs.py", "/utils/randomness.py"], "/cfme/tests/services/test_catalog_item.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_report_chargeback.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_tag_infrastructure.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_system_image_type.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_report_corresponds.py": ["/utils/randomness.py"], "/cfme/tests/storage/test_storage_crud.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_class.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/configure/test_docs.py": ["/cfme/configure/about.py"], "/cfme/tests/services/test_operations.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_timeprofile.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_tag.py": ["/utils/randomness.py"], "/cfme/tests/automate/__init__.py": ["/utils/randomness.py"], 
"/cfme/tests/intelligence/reports/test_widgets.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_tag_category.py": ["/utils/randomness.py"], "/cfme/tests/cloud/test_instance_power_control.py": ["/utils/randomness.py"]}
|
11,141
|
petrblaho/cfme_tests
|
refs/heads/master
|
/cfme/tests/infrastructure/test_customization_template.py
|
import pytest
from cfme.infrastructure import pxe
from utils import error
from utils.blockers import BZ
from utils.randomness import generate_random_string
from utils.update import update
pytestmark = [pytest.mark.usefixtures("logged_in")]
def test_customization_template_crud():
"""Basic CRUD test for customization templates."""
template_crud = pxe.CustomizationTemplate(
name=generate_random_string(size=8),
description=generate_random_string(size=16),
image_type='RHEL-6',
script_type='Kickstart',
script_data='Testing the script')
template_crud.create()
with update(template_crud):
template_crud.name = template_crud.name + "_update"
template_crud.delete(cancel=False)
def test_name_required_error_validation():
"""Test to validate name in customization templates."""
template_name = pxe.CustomizationTemplate(
name=None,
description=generate_random_string(size=16),
image_type='RHEL-6',
script_type='Kickstart',
script_data='Testing the script')
with error.expected('Name is required'):
template_name.create()
def test_type_required_error_validation():
"""Test to validate type in customization templates."""
template_name = pxe.CustomizationTemplate(
name=generate_random_string(size=8),
description=generate_random_string(size=16),
image_type='RHEL-6',
script_type='<Choose>',
script_data='Testing the script')
with error.expected('Type is required'):
template_name.create()
def test_pxe_image_type_required_error_validation():
"""Test to validate pxe image type in customization templates."""
template_name = pxe.CustomizationTemplate(
name=generate_random_string(size=8),
description=generate_random_string(size=16),
image_type='<Choose>',
script_type='Kickstart',
script_data='Testing the script')
with error.expected("Pxe_image_type can't be blank"):
template_name.create()
@pytest.mark.meta(
blockers=[
BZ(1092951, ignore_bugs=[1083198])
]
)
def test_duplicate_name_error_validation():
"""Test to validate duplication in customization templates."""
template_name = pxe.CustomizationTemplate(
name=generate_random_string(size=8),
description=generate_random_string(size=16),
image_type='RHEL-6',
script_type='Kickstart',
script_data='Testing the script')
template_name.create()
with error.expected('Name has already been taken'):
template_name.create()
template_name.delete(cancel=False)
@pytest.mark.xfail(message='http://cfme-tests.readthedocs.org/guides/gotchas.html#'
'selenium-is-not-clicking-on-the-element-it-says-it-is')
def test_name_max_character_validation():
"""Test to validate name with maximum characters in customization templates."""
template_name = pxe.CustomizationTemplate(
name=generate_random_string(size=256),
description=generate_random_string(size=16),
image_type='RHEL-6',
script_type='Kickstart',
script_data='Testing the script')
with error.expected('Name is required'):
template_name.create()
template_name.delete(cancel=False)
|
{"/cfme/tests/cloud/test_tag_cloud.py": ["/utils/randomness.py"], "/cfme/tests/control/test_compliance.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_service_dialog.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_customization_template.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_namespace.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/automate/test_instance.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/configure/test_zones.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_method.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_crud.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_provisioning_dialogs.py": ["/utils/randomness.py"], "/cfme/tests/cloud/test_provisioning.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_vm_discovery.py": ["/utils/randomness.py"], "/cfme/tests/services/test_dynamicdd_dialogelement.py": ["/cfme/services/catalogs/service_catalogs.py", "/utils/randomness.py"], "/cfme/tests/services/test_catalog_item.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_report_chargeback.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_tag_infrastructure.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_system_image_type.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_report_corresponds.py": ["/utils/randomness.py"], "/cfme/tests/storage/test_storage_crud.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_class.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/configure/test_docs.py": ["/cfme/configure/about.py"], "/cfme/tests/services/test_operations.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_timeprofile.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_tag.py": ["/utils/randomness.py"], "/cfme/tests/automate/__init__.py": ["/utils/randomness.py"], 
"/cfme/tests/intelligence/reports/test_widgets.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_tag_category.py": ["/utils/randomness.py"], "/cfme/tests/cloud/test_instance_power_control.py": ["/utils/randomness.py"]}
|
11,142
|
petrblaho/cfme_tests
|
refs/heads/master
|
/cfme/tests/automate/test_namespace.py
|
# -*- coding: utf-8 -*-
# pylint: disable=E1101
# pylint: disable=W0621
import pytest
from cfme.automate.explorer import Namespace
from utils.providers import setup_a_provider
from utils.randomness import generate_random_string
from utils.update import update
from utils import version
import utils.error as error
import cfme.tests.configure.test_access_control as tac
import cfme.tests.automate as ta
pytestmark = [pytest.mark.usefixtures("logged_in")]
@pytest.fixture(
scope="function",
params=[ta.a_namespace, ta.a_namespace_with_path])
def namespace(request):
# don't test with existing paths on upstream (there aren't any)
if request.param is ta.a_namespace_with_path and version.current_version() == version.LATEST:
pytest.skip("don't test with existing paths on upstream (there aren't any)")
return request.param()
@pytest.fixture
def setup_single_provider():
setup_a_provider()
def test_namespace_crud(namespace):
namespace.create()
old_name = namespace.name
with update(namespace):
namespace.name = generate_random_string(8)
with update(namespace):
namespace.name = old_name
namespace.delete()
assert not namespace.exists()
def test_add_delete_namespace_nested(namespace):
namespace.create()
nested_ns = Namespace(name="Nested", parent=namespace)
nested_ns.create()
namespace.delete()
assert not nested_ns.exists()
@pytest.mark.meta(blockers=[1136518])
def test_duplicate_namespace_disallowed(namespace):
namespace.create()
with error.expected("Name has already been taken"):
namespace.create()
# provider needed as workaround for bz1035399
@pytest.mark.meta(blockers=[1140331])
def test_permissions_namespace_crud(setup_single_provider):
""" Tests that a namespace can be manipulated only with the right permissions"""
tac.single_task_permission_test([['Automate', 'Explorer']],
{'Namespace CRUD':
lambda: test_namespace_crud(ta.a_namespace())})
|
{"/cfme/tests/cloud/test_tag_cloud.py": ["/utils/randomness.py"], "/cfme/tests/control/test_compliance.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_service_dialog.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_customization_template.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_namespace.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/automate/test_instance.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/configure/test_zones.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_method.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_crud.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_provisioning_dialogs.py": ["/utils/randomness.py"], "/cfme/tests/cloud/test_provisioning.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_vm_discovery.py": ["/utils/randomness.py"], "/cfme/tests/services/test_dynamicdd_dialogelement.py": ["/cfme/services/catalogs/service_catalogs.py", "/utils/randomness.py"], "/cfme/tests/services/test_catalog_item.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_report_chargeback.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_tag_infrastructure.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_system_image_type.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_report_corresponds.py": ["/utils/randomness.py"], "/cfme/tests/storage/test_storage_crud.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_class.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/configure/test_docs.py": ["/cfme/configure/about.py"], "/cfme/tests/services/test_operations.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_timeprofile.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_tag.py": ["/utils/randomness.py"], "/cfme/tests/automate/__init__.py": ["/utils/randomness.py"], 
"/cfme/tests/intelligence/reports/test_widgets.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_tag_category.py": ["/utils/randomness.py"], "/cfme/tests/cloud/test_instance_power_control.py": ["/utils/randomness.py"]}
|
11,143
|
petrblaho/cfme_tests
|
refs/heads/master
|
/cfme/tests/automate/test_instance.py
|
import pytest
from utils.randomness import generate_random_string
from utils.update import update
import utils.error as error
import cfme.tests.automate as ta
pytestmark = [pytest.mark.usefixtures("logged_in")]
@pytest.fixture(scope='module')
def make_class(request):
return ta.make_class(request=request)
@pytest.fixture(scope="function")
def an_instance(request, make_class):
return ta.an_instance(make_class, request=request)
def test_instance_crud(an_instance):
an_instance.create()
origname = an_instance.name
with update(an_instance):
an_instance.name = generate_random_string(8)
an_instance.description = "updated"
with update(an_instance):
an_instance.name = origname
an_instance.delete()
assert not an_instance.exists()
def test_duplicate_disallowed(an_instance):
an_instance.create()
with error.expected("Name has already been taken"):
an_instance.create()
@pytest.mark.meta(blockers=[1148541])
def test_display_name_unset_from_ui(request, an_instance):
an_instance.create()
request.addfinalizer(an_instance.delete)
with update(an_instance):
an_instance.display_name = generate_random_string()
assert an_instance.exists
with update(an_instance):
an_instance.display_name = ""
assert an_instance.exists
|
{"/cfme/tests/cloud/test_tag_cloud.py": ["/utils/randomness.py"], "/cfme/tests/control/test_compliance.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_service_dialog.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_customization_template.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_namespace.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/automate/test_instance.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/configure/test_zones.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_method.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_crud.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_provisioning_dialogs.py": ["/utils/randomness.py"], "/cfme/tests/cloud/test_provisioning.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_vm_discovery.py": ["/utils/randomness.py"], "/cfme/tests/services/test_dynamicdd_dialogelement.py": ["/cfme/services/catalogs/service_catalogs.py", "/utils/randomness.py"], "/cfme/tests/services/test_catalog_item.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_report_chargeback.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_tag_infrastructure.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_system_image_type.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_report_corresponds.py": ["/utils/randomness.py"], "/cfme/tests/storage/test_storage_crud.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_class.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/configure/test_docs.py": ["/cfme/configure/about.py"], "/cfme/tests/services/test_operations.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_timeprofile.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_tag.py": ["/utils/randomness.py"], "/cfme/tests/automate/__init__.py": ["/utils/randomness.py"], 
"/cfme/tests/intelligence/reports/test_widgets.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_tag_category.py": ["/utils/randomness.py"], "/cfme/tests/cloud/test_instance_power_control.py": ["/utils/randomness.py"]}
|
11,144
|
petrblaho/cfme_tests
|
refs/heads/master
|
/cfme/tests/configure/test_zones.py
|
# -*- coding: utf-8 -*-
import pytest
import cfme.web_ui.flash as flash
import cfme.configure.configuration as conf
from utils.randomness import generate_random_string
from utils.update import update
@pytest.mark.smoke
def test_zone_crud(soft_assert):
zone = conf.Zone(
name=generate_random_string(size=5),
description=generate_random_string(size=8))
# CREATE
zone.create()
soft_assert(zone.exists, "The zone {} does not exist!".format(
zone.description
))
# UPDATE
old_desc = zone.description
with update(zone):
zone.description = generate_random_string(size=8)
soft_assert(zone.exists and (old_desc != zone.description),
"The zone {} was not updated!".format(
zone.description
))
# DELETE
zone.delete()
soft_assert(not zone.exists, "The zone {} exists!".format(
zone.description
))
def test_zone_add_cancel_validation():
zone = conf.Zone(
name=generate_random_string(size=5),
description=generate_random_string(size=8))
zone.create(cancel=True)
flash.assert_message_match('Add of new Miq Zone was cancelled by the user')
def test_zone_change_appliance_zone(request):
""" Tests that an appliance can be changed to another Zone """
zone = conf.Zone(
name=generate_random_string(size=5),
description=generate_random_string(size=8))
request.addfinalizer(zone.delete)
request.addfinalizer(conf.BasicInformation(appliance_zone="default").update)
zone.create()
basic_info = conf.BasicInformation(appliance_zone=zone.name)
basic_info.update()
assert zone.description == conf.server_zone_description()
basic_info = conf.BasicInformation(appliance_zone="default")
basic_info.update()
|
{"/cfme/tests/cloud/test_tag_cloud.py": ["/utils/randomness.py"], "/cfme/tests/control/test_compliance.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_service_dialog.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_customization_template.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_namespace.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/automate/test_instance.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/configure/test_zones.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_method.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_crud.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_provisioning_dialogs.py": ["/utils/randomness.py"], "/cfme/tests/cloud/test_provisioning.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_vm_discovery.py": ["/utils/randomness.py"], "/cfme/tests/services/test_dynamicdd_dialogelement.py": ["/cfme/services/catalogs/service_catalogs.py", "/utils/randomness.py"], "/cfme/tests/services/test_catalog_item.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_report_chargeback.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_tag_infrastructure.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_system_image_type.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_report_corresponds.py": ["/utils/randomness.py"], "/cfme/tests/storage/test_storage_crud.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_class.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/configure/test_docs.py": ["/cfme/configure/about.py"], "/cfme/tests/services/test_operations.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_timeprofile.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_tag.py": ["/utils/randomness.py"], "/cfme/tests/automate/__init__.py": ["/utils/randomness.py"], 
"/cfme/tests/intelligence/reports/test_widgets.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_tag_category.py": ["/utils/randomness.py"], "/cfme/tests/cloud/test_instance_power_control.py": ["/utils/randomness.py"]}
|
11,145
|
petrblaho/cfme_tests
|
refs/heads/master
|
/scripts/harden_security.py
|
#!/usr/bin/env python2
"""SSH into a running appliance and configure security.
Configures security on appliance(s) according to this document:
https://access.redhat.com/articles/1124753
Works for single appliance and distributed appliance configurations.
In distributed configurations, provide the hostname of the replication
parent first, and then provide the hostnames of any child appliances using
the '-c' flag.
Example usage:
Configure security for a single appliance:
configure_security.py 10.0.0.1
Configure security for distributed appliance set:
# replication parent: 10.0.0.1
# replication child: 10.0.0.2
# replication child: 10.0.0.3
configure_security.py 10.0.0.1 -c 10.0.0.2 -c 10.0.0.3
"""
import argparse
import re
import socket
import sys
from utils.conf import credentials
from utils.randomness import generate_random_string
from utils.ssh import SSHClient
from utils.wait import wait_for
def main():
parser = argparse.ArgumentParser(epilog=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('appliance',
help='hostname or ip address of parent appliance')
parser.add_argument('-c', action='append', dest='children',
help='hostname or ip address of child appliance')
args = parser.parse_args()
print "Appliance: " + args.appliance
if args.children:
for child in args.children:
print "Child: " + child
local_key_name = "v2_key_" + generate_random_string()
ssh_creds = {
'username': credentials['ssh']['username'],
'password': credentials['ssh']['password'],
}
def is_ssh_running(address):
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
result = s.connect_ex((address, 22))
return result == 0
def generate_key(address):
with SSHClient(hostname=address, **ssh_creds) as client:
print 'Connecting to Appliance...'
status, out = client.run_command(
'ruby /var/www/miq/vmdb/tools/fix_auth.rb --key --verbose')
if status != 0:
print 'Creating new encryption key failed.'
print out
sys.exit(1)
else:
print 'New encryption key created.'
if args.children:
# Only copy locally if needed for child appliances
client.get_file('/var/www/miq/vmdb/certs/v2_key',
local_key_name)
def update_db_yaml(address):
with SSHClient(hostname=address, **ssh_creds) as client:
client.run_command('cd /var/www/miq/vmdb')
status, out = client.run_rails_command(
'\'puts MiqPassword.encrypt("smartvm");\'')
if status != 0:
print 'Retrieving encrypted db password failed on %s' % address
sys.exit(1)
else:
encrypted_pass = out
status, out = client.run_command(
('cd /var/www/miq/vmdb; '
'sed -i.`date +%m-%d-%Y` "s/password:'
' .*/password: {}/g" config/database.yml'.format(re.escape(encrypted_pass))))
if status != 0:
print 'Updating database.yml failed on %s' % address
print out
sys.exit(1)
else:
print 'Updating database.yml succeeded on %s' % address
def update_password(address):
with SSHClient(hostname=address, **ssh_creds) as client:
status, out = client.run_command(
'ruby /var/www/miq/vmdb/tools/fix_auth.rb --hostname localhost --password smartvm')
if status != 0:
print 'Updating DB password failed on %s' % address
print out
sys.exit(1)
else:
print 'DB password updated on %s' % address
def put_key(address):
print 'copying key to %s' % address
with SSHClient(hostname=address, **ssh_creds) as client:
client.put_file(local_key_name, '/var/www/miq/vmdb/certs/v2_key')
def restart_appliance(address):
print 'Restarting evmserverd on %s' % address
with SSHClient(hostname=address, **ssh_creds) as client:
status, out = client.run_command('service evmserverd restart')
if status != 0:
print "Restarting evmserverd failed on %s" % address
sys.exit(1)
else:
print "Restarting succeeded on %s" % address
# make sure ssh is ready on each appliance
wait_for(func=is_ssh_running, func_args=[args.appliance], delay=10, num_sec=600)
# generate key on master appliance
generate_key(args.appliance)
update_db_yaml(args.appliance)
# copy to other appliances
if args.children:
for child in args.children:
wait_for(func=is_ssh_running, func_args=[child], delay=10, num_sec=600)
put_key(child)
update_db_yaml(child)
# restart master appliance (and children, if provided)
restart_appliance(args.appliance)
if args.children:
for child in args.children:
restart_appliance(child)
print "Appliance(s) restarted with new key in place."
# update encrypted passwords in each database-owning appliance.
update_password(args.appliance)
if args.children:
for child in args.children:
update_password(child)
# Restart again!
restart_appliance(args.appliance)
if args.children:
for child in args.children:
restart_appliance(child)
print "Done!"
if __name__ == '__main__':
sys.exit(main())
|
{"/cfme/tests/cloud/test_tag_cloud.py": ["/utils/randomness.py"], "/cfme/tests/control/test_compliance.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_service_dialog.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_customization_template.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_namespace.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/automate/test_instance.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/configure/test_zones.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_method.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_crud.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_provisioning_dialogs.py": ["/utils/randomness.py"], "/cfme/tests/cloud/test_provisioning.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_vm_discovery.py": ["/utils/randomness.py"], "/cfme/tests/services/test_dynamicdd_dialogelement.py": ["/cfme/services/catalogs/service_catalogs.py", "/utils/randomness.py"], "/cfme/tests/services/test_catalog_item.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_report_chargeback.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_tag_infrastructure.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_system_image_type.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_report_corresponds.py": ["/utils/randomness.py"], "/cfme/tests/storage/test_storage_crud.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_class.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/configure/test_docs.py": ["/cfme/configure/about.py"], "/cfme/tests/services/test_operations.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_timeprofile.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_tag.py": ["/utils/randomness.py"], "/cfme/tests/automate/__init__.py": ["/utils/randomness.py"], 
"/cfme/tests/intelligence/reports/test_widgets.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_tag_category.py": ["/utils/randomness.py"], "/cfme/tests/cloud/test_instance_power_control.py": ["/utils/randomness.py"]}
|
11,146
|
petrblaho/cfme_tests
|
refs/heads/master
|
/cfme/fixtures/smtp.py
|
# -*- coding: utf-8 -*-
"""This module provides a fixture useful for checking the e-mails arrived.
Main use is of fixture :py:meth:`smtp_test`, which is function scoped. There is also
a :py:meth:`smtp_test_module` fixture for which the smtp_test is just a function-scoped wrapper
to speed things up. The base of all this is the session-scoped _smtp_test_session that keeps care
about the collector.
"""
import pytest
import signal
import subprocess
import time
from cfme.configure import configuration
from fixtures.artifactor_plugin import art_client, get_test_idents
from utils.conf import env
from utils.log import create_logger
from utils.net import random_port, my_ip_address, net_check_remote
from utils.path import scripts_path
from utils.smtp_collector_client import SMTPCollectorClient
logger = create_logger('emails')
@pytest.fixture(scope="function")
def smtp_test(request):
"""Fixture, which prepares the appliance for e-mail capturing tests
Returns: :py:class:`util.smtp_collector_client.SMTPCollectorClient` instance.
"""
logger.info("Preparing start for e-mail collector")
ports = env.get("mail_collector", {}).get("ports", {})
mail_server_port = ports.get("smtp", None) or random_port()
mail_query_port = ports.get("json", None) or random_port()
my_ip = my_ip_address()
logger.info("Mind that it needs ports {} and {} open".format(mail_query_port, mail_server_port))
smtp_conf = configuration.SMTPSettings(
host=my_ip,
port=mail_server_port,
auth="none",
)
smtp_conf.update()
server_filename = scripts_path.join('smtp_collector.py').strpath
server_command = server_filename + " --smtp-port {} --query-port {}".format(
mail_server_port,
mail_query_port
)
logger.info("Starting mail collector {}".format(server_command))
collector = None
def _finalize():
if collector is None:
return
logger.info("Sending KeyboardInterrupt to collector")
collector.send_signal(signal.SIGINT)
time.sleep(2)
if collector.poll() is None:
logger.info("Sending SIGTERM to collector")
collector.send_signal(signal.SIGTERM)
time.sleep(5)
if collector.poll() is None:
logger.info("Sending SIGKILL to collector")
collector.send_signal(signal.SIGKILL)
collector.wait()
logger.info("Collector finished")
collector = subprocess.Popen(server_command, shell=True)
request.addfinalizer(_finalize)
logger.info("Collector pid {}".format(collector.pid))
logger.info("Waiting for collector to become alive.")
time.sleep(3)
assert collector.poll() is None, "Collector has died. Something must be blocking selected ports"
logger.info("Collector alive")
query_port_open = net_check_remote(mail_query_port, my_ip, force=True)
server_port_open = net_check_remote(mail_server_port, my_ip, force=True)
assert query_port_open and server_port_open,\
'Ports {} and {} on the machine executing the tests are closed.\n'\
'The ports are randomly chosen -> turn firewall off.'\
.format(mail_query_port, mail_server_port)
client = SMTPCollectorClient(
my_ip,
mail_query_port
)
client.set_test_name(request.node.name)
client.clear_database()
return client
@pytest.mark.hookwrapper
def pytest_runtest_call(item):
try:
yield
finally:
if "smtp_test" not in item.funcargs:
return
name, location = get_test_idents(item)
art_client.fire_hook(
"filedump",
test_name=name,
test_location=location,
filename="emails.html",
contents=item.funcargs["smtp_test"].get_html_report(),
fd_ident="emails"
)
|
{"/cfme/tests/cloud/test_tag_cloud.py": ["/utils/randomness.py"], "/cfme/tests/control/test_compliance.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_service_dialog.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_customization_template.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_namespace.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/automate/test_instance.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/configure/test_zones.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_method.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_crud.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_provisioning_dialogs.py": ["/utils/randomness.py"], "/cfme/tests/cloud/test_provisioning.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_vm_discovery.py": ["/utils/randomness.py"], "/cfme/tests/services/test_dynamicdd_dialogelement.py": ["/cfme/services/catalogs/service_catalogs.py", "/utils/randomness.py"], "/cfme/tests/services/test_catalog_item.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_report_chargeback.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_tag_infrastructure.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_system_image_type.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_report_corresponds.py": ["/utils/randomness.py"], "/cfme/tests/storage/test_storage_crud.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_class.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/configure/test_docs.py": ["/cfme/configure/about.py"], "/cfme/tests/services/test_operations.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_timeprofile.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_tag.py": ["/utils/randomness.py"], "/cfme/tests/automate/__init__.py": ["/utils/randomness.py"], 
"/cfme/tests/intelligence/reports/test_widgets.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_tag_category.py": ["/utils/randomness.py"], "/cfme/tests/cloud/test_instance_power_control.py": ["/utils/randomness.py"]}
|
11,147
|
petrblaho/cfme_tests
|
refs/heads/master
|
/cfme/tests/automate/test_method.py
|
import pytest
from cfme.automate.explorer import Namespace, Class, Method
from utils.randomness import generate_random_string
from utils.update import update
import utils.error as error
pytestmark = [pytest.mark.usefixtures("logged_in")]
def _make_namespace():
name = generate_random_string(8)
description = generate_random_string(32)
ns = Namespace(name=name, description=description)
ns.create()
return ns
def _make_class():
name = generate_random_string(8)
description = generate_random_string(32)
cls = Class(name=name, description=description, namespace=_make_namespace())
cls.create()
return cls
@pytest.fixture(scope='module')
def a_class():
return _make_class()
@pytest.fixture
def a_method(a_class):
return Method(name=generate_random_string(8),
data="foo.bar()",
cls=a_class)
def test_method_crud(a_method):
a_method.create()
origname = a_method.name
with update(a_method):
a_method.name = generate_random_string(8)
a_method.data = "bar"
with update(a_method):
a_method.name = origname
a_method.delete()
assert not a_method.exists()
def test_duplicate_method_disallowed(a_method):
    """Creating the same Method twice must surface the name-taken error."""
    a_method.create()
    # A second create with an identical name is rejected by the appliance.
    with error.expected("Name has already been taken"):
        a_method.create()
|
{"/cfme/tests/cloud/test_tag_cloud.py": ["/utils/randomness.py"], "/cfme/tests/control/test_compliance.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_service_dialog.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_customization_template.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_namespace.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/automate/test_instance.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/configure/test_zones.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_method.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_crud.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_provisioning_dialogs.py": ["/utils/randomness.py"], "/cfme/tests/cloud/test_provisioning.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_vm_discovery.py": ["/utils/randomness.py"], "/cfme/tests/services/test_dynamicdd_dialogelement.py": ["/cfme/services/catalogs/service_catalogs.py", "/utils/randomness.py"], "/cfme/tests/services/test_catalog_item.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_report_chargeback.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_tag_infrastructure.py": ["/utils/randomness.py"], "/cfme/tests/infrastructure/test_system_image_type.py": ["/utils/randomness.py"], "/cfme/tests/intelligence/reports/test_report_corresponds.py": ["/utils/randomness.py"], "/cfme/tests/storage/test_storage_crud.py": ["/utils/randomness.py"], "/cfme/tests/automate/test_class.py": ["/utils/randomness.py", "/cfme/tests/automate/__init__.py"], "/cfme/tests/configure/test_docs.py": ["/cfme/configure/about.py"], "/cfme/tests/services/test_operations.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_timeprofile.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_tag.py": ["/utils/randomness.py"], "/cfme/tests/automate/__init__.py": ["/utils/randomness.py"], 
"/cfme/tests/intelligence/reports/test_widgets.py": ["/utils/randomness.py"], "/cfme/tests/configure/test_tag_category.py": ["/utils/randomness.py"], "/cfme/tests/cloud/test_instance_power_control.py": ["/utils/randomness.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.