| sample_id (string, 21–196 chars) | text (string, 105–936k chars) | metadata (dict) | category (6 classes) |
|---|---|---|---|
geekcomputers/Python:ML/src/python/neuralforge/nn/modules.py | import torch
import torch.nn as nn
import torch.nn.functional as F
from typing import Optional, Tuple
import math
class DynamicConv2d(nn.Module):
    """2D convolution module that owns its weight/bias parameters directly.

    Mirrors ``nn.Conv2d`` for the supported arguments; the weight gets a
    Kaiming-normal init (fan-out, ReLU gain) and the bias starts at zero.
    """

    def __init__(self, in_channels, out_channels, kernel_size, stride=1, padding=0, groups=1):
        super().__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.kernel_size = kernel_size
        self.stride = stride
        self.padding = padding
        self.groups = groups
        weight_shape = (out_channels, in_channels // groups, kernel_size, kernel_size)
        self.weight = nn.Parameter(torch.randn(weight_shape))
        self.bias = nn.Parameter(torch.zeros(out_channels))
        nn.init.kaiming_normal_(self.weight, mode='fan_out', nonlinearity='relu')

    def forward(self, x):
        """Convolve an (N, C, H, W) input with the stored kernel and bias."""
        return F.conv2d(x, self.weight, self.bias, self.stride, self.padding, groups=self.groups)
class DynamicLinear(nn.Module):
    """Affine layer ``y = x W^T + b`` initialized like ``nn.Linear``."""

    def __init__(self, in_features, out_features, bias=True):
        super().__init__()
        self.in_features = in_features
        self.out_features = out_features
        self.weight = nn.Parameter(torch.randn(out_features, in_features))
        if not bias:
            self.register_parameter('bias', None)
        else:
            self.bias = nn.Parameter(torch.zeros(out_features))
        # Kaiming-uniform weight init, matching torch's Linear reset_parameters.
        nn.init.kaiming_uniform_(self.weight, a=math.sqrt(5))
        if self.bias is not None:
            fan_in, _ = nn.init._calculate_fan_in_and_fan_out(self.weight)
            limit = 1 / math.sqrt(fan_in)
            nn.init.uniform_(self.bias, -limit, limit)

    def forward(self, x):
        """Apply the affine map over the trailing feature dimension of ``x``."""
        return F.linear(x, self.weight, self.bias)
class AdaptiveBatchNorm2d(nn.Module):
    """Batch normalization over (N, H, W) per channel, with learnable affine.

    Fixed to follow ``nn.BatchNorm2d`` semantics: the training-mode
    normalization uses the biased batch variance, while the *running*
    variance is updated with the unbiased estimate (PyTorch convention).
    The running buffers are also updated in place so the registered buffers
    keep their identity, device and dtype.
    """

    def __init__(self, num_features, eps=1e-5, momentum=0.1):
        super().__init__()
        self.num_features = num_features
        self.eps = eps
        self.momentum = momentum
        self.weight = nn.Parameter(torch.ones(num_features))
        self.bias = nn.Parameter(torch.zeros(num_features))
        self.register_buffer('running_mean', torch.zeros(num_features))
        self.register_buffer('running_var', torch.ones(num_features))
        self.register_buffer('num_batches_tracked', torch.tensor(0, dtype=torch.long))

    def forward(self, x):
        """Normalize (N, C, H, W) input; batch stats in train, running in eval."""
        if self.training:
            mean = x.mean([0, 2, 3])
            # Biased variance for the normalization itself (as F.batch_norm does).
            var = x.var([0, 2, 3], unbiased=False)
            with torch.no_grad():
                count = x.numel() / x.size(1)
                # Unbiased variance feeds the running estimate (PyTorch convention).
                unbiased_var = var * count / (count - 1) if count > 1 else var
                # In-place updates preserve the registered buffers.
                self.running_mean.mul_(1 - self.momentum).add_(mean, alpha=self.momentum)
                self.running_var.mul_(1 - self.momentum).add_(unbiased_var, alpha=self.momentum)
                self.num_batches_tracked += 1
            x_normalized = (x - mean[None, :, None, None]) / torch.sqrt(var[None, :, None, None] + self.eps)
        else:
            x_normalized = (x - self.running_mean[None, :, None, None]) / torch.sqrt(
                self.running_var[None, :, None, None] + self.eps)
        return self.weight[None, :, None, None] * x_normalized + self.bias[None, :, None, None]
class LayerNorm(nn.Module):
    """Layer normalization over the last dimension with learnable affine.

    Fixed to match ``F.layer_norm``: normalizes with the *biased* variance and
    adds ``eps`` inside the square root. The original divided by the unbiased
    std with ``eps`` added outside the sqrt, which deviates from the standard
    LayerNorm contract and produces NaN for a size-1 last dimension.
    """

    def __init__(self, normalized_shape, eps=1e-5):
        super().__init__()
        self.normalized_shape = normalized_shape
        self.eps = eps
        self.weight = nn.Parameter(torch.ones(normalized_shape))
        self.bias = nn.Parameter(torch.zeros(normalized_shape))

    def forward(self, x):
        """Normalize the last dimension of ``x`` and apply the affine params."""
        mean = x.mean(-1, keepdim=True)
        var = x.var(-1, unbiased=False, keepdim=True)
        x_hat = (x - mean) / torch.sqrt(var + self.eps)
        return self.weight * x_hat + self.bias
class GroupNorm(nn.Module):
    """Group normalization (Wu & He, 2018) with per-channel affine params.

    Fixed to normalize with the biased variance (``unbiased=False``) so the
    output matches ``nn.GroupNorm`` / ``F.group_norm``; the original relied on
    ``Tensor.var``'s default (unbiased), giving slightly wrong statistics.
    """

    def __init__(self, num_groups, num_channels, eps=1e-5):
        super().__init__()
        self.num_groups = num_groups
        self.num_channels = num_channels
        self.eps = eps
        self.weight = nn.Parameter(torch.ones(num_channels))
        self.bias = nn.Parameter(torch.zeros(num_channels))

    def forward(self, x):
        """Normalize (N, C, H, W) input per group of channels."""
        N, C, H, W = x.shape
        g = x.reshape(N, self.num_groups, C // self.num_groups, H, W)
        mean = g.mean([2, 3, 4], keepdim=True)
        # Biased variance, as in F.group_norm.
        var = g.var([2, 3, 4], unbiased=False, keepdim=True)
        g = (g - mean) / torch.sqrt(var + self.eps)
        out = g.reshape(N, C, H, W)
        return out * self.weight[None, :, None, None] + self.bias[None, :, None, None]
class DropPath(nn.Module):
    """Stochastic depth: zero whole samples at random, rescaling survivors."""

    def __init__(self, drop_prob=0.0):
        super().__init__()
        self.drop_prob = drop_prob

    def forward(self, x):
        # Identity in eval mode or when dropping is disabled.
        if not self.training or self.drop_prob == 0.0:
            return x
        keep_prob = 1 - self.drop_prob
        # One Bernoulli draw per sample, broadcast over remaining dims.
        mask_shape = [x.shape[0]] + [1] * (x.ndim - 1)
        mask = torch.rand(mask_shape, dtype=x.dtype, device=x.device).add_(keep_prob).floor_()
        # Divide by keep_prob so the expected activation is unchanged.
        return x.div(keep_prob) * mask
class GlobalAvgPool2d(nn.Module):
    """Average over the spatial dims: (N, C, H, W) -> (N, C)."""

    def __init__(self):
        super().__init__()

    def forward(self, x):
        return x.mean(dim=(2, 3))
class GlobalMaxPool2d(nn.Module):
    """Maximum over the spatial dims: (N, C, H, W) -> (N, C)."""

    def __init__(self):
        super().__init__()

    def forward(self, x):
        n, c = x.shape[0], x.shape[1]
        # Flattening H and W first gives the same result as two chained maxes.
        return x.reshape(n, c, -1).max(dim=2).values
class AdaptiveAvgMaxPool2d(nn.Module):
    """Concatenate global average- and max-pooled features: (N, C, H, W) -> (N, 2C)."""

    def __init__(self):
        super().__init__()
        self.avg_pool = GlobalAvgPool2d()
        self.max_pool = GlobalMaxPool2d()

    def forward(self, x):
        pooled = (self.avg_pool(x), self.max_pool(x))
        return torch.cat(pooled, dim=1)
class Flatten(nn.Module):
    """Flatten all dims from ``start_dim`` onward (default keeps the batch dim)."""

    def __init__(self, start_dim=1):
        super().__init__()
        self.start_dim = start_dim

    def forward(self, x):
        return torch.flatten(x, self.start_dim)
class SqueezeExcitation(nn.Module):
    """Squeeze-and-Excitation channel attention (Hu et al.)."""

    def __init__(self, channels, reduction=16):
        super().__init__()
        self.fc1 = nn.Linear(channels, channels // reduction)
        self.fc2 = nn.Linear(channels // reduction, channels)

    def forward(self, x):
        batch, channels = x.shape[0], x.shape[1]
        # Squeeze: global average pool to one value per channel.
        squeezed = x.mean(dim=(2, 3))
        # Excite: bottleneck MLP + sigmoid gate.
        gate = torch.sigmoid(self.fc2(F.relu(self.fc1(squeezed))))
        return x * gate.reshape(batch, channels, 1, 1)
class SpatialAttention(nn.Module):
    """CBAM-style spatial attention over channel-pooled feature maps."""

    def __init__(self, kernel_size=7):
        super().__init__()
        self.conv = nn.Conv2d(2, 1, kernel_size, padding=kernel_size // 2)

    def forward(self, x):
        # Channel-wise mean and max, each (N, 1, H, W).
        mean_map = x.mean(dim=1, keepdim=True)
        max_map = x.max(dim=1, keepdim=True).values
        stacked = torch.cat((mean_map, max_map), dim=1)
        gate = torch.sigmoid(self.conv(stacked))
        return x * gate
class CBAM(nn.Module):
    """Convolutional Block Attention Module: channel then spatial attention."""

    def __init__(self, channels, reduction=16, kernel_size=7):
        super().__init__()
        self.channel_attention = SqueezeExcitation(channels, reduction)
        self.spatial_attention = SpatialAttention(kernel_size)

    def forward(self, x):
        return self.spatial_attention(self.channel_attention(x))
| {
"repo_id": "geekcomputers/Python",
"file_path": "ML/src/python/neuralforge/nn/modules.py",
"license": "MIT License",
"lines": 154,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:ML/src/python/neuralforge/optim/optimizers.py | import torch
from torch.optim.optimizer import Optimizer
import math
class AdamW(Optimizer):
    """Adam with decoupled weight decay (Loshchilov & Hutter, 2019).

    The weight decay multiplies the parameter directly before the adaptive
    update instead of being folded into the gradient, so the decay strength
    is independent of the per-parameter adaptive learning rate.
    """

    def __init__(self, params, lr=1e-3, betas=(0.9, 0.999), eps=1e-8, weight_decay=0.01, amsgrad=False):
        # Fail fast on out-of-range hyperparameters.
        if lr < 0.0:
            raise ValueError(f"Invalid learning rate: {lr}")
        if eps < 0.0:
            raise ValueError(f"Invalid epsilon value: {eps}")
        if not 0.0 <= betas[0] < 1.0:
            raise ValueError(f"Invalid beta parameter at index 0: {betas[0]}")
        if not 0.0 <= betas[1] < 1.0:
            raise ValueError(f"Invalid beta parameter at index 1: {betas[1]}")
        defaults = dict(lr=lr, betas=betas, eps=eps, weight_decay=weight_decay, amsgrad=amsgrad)
        super().__init__(params, defaults)

    def step(self, closure=None):
        """Perform a single optimization step; ``closure`` re-evaluates the loss."""
        loss = None
        if closure is not None:
            loss = closure()
        for group in self.param_groups:
            for p in group['params']:
                if p.grad is None:
                    continue
                grad = p.grad.data
                if grad.is_sparse:
                    raise RuntimeError('AdamW does not support sparse gradients')
                amsgrad = group['amsgrad']
                state = self.state[p]
                # Lazy per-parameter state initialization on first update.
                if len(state) == 0:
                    state['step'] = 0
                    state['exp_avg'] = torch.zeros_like(p.data)
                    state['exp_avg_sq'] = torch.zeros_like(p.data)
                    if amsgrad:
                        state['max_exp_avg_sq'] = torch.zeros_like(p.data)
                exp_avg, exp_avg_sq = state['exp_avg'], state['exp_avg_sq']
                if amsgrad:
                    max_exp_avg_sq = state['max_exp_avg_sq']
                beta1, beta2 = group['betas']
                state['step'] += 1
                # Decoupled weight decay: shrink the parameter before the Adam update.
                p.data.mul_(1 - group['lr'] * group['weight_decay'])
                # Exponential moving averages of the gradient and squared gradient.
                exp_avg.mul_(beta1).add_(grad, alpha=1 - beta1)
                exp_avg_sq.mul_(beta2).addcmul_(grad, grad, value=1 - beta2)
                if amsgrad:
                    # AMSGrad: use the running maximum of the second moment.
                    torch.max(max_exp_avg_sq, exp_avg_sq, out=max_exp_avg_sq)
                    denom = max_exp_avg_sq.sqrt().add_(group['eps'])
                else:
                    denom = exp_avg_sq.sqrt().add_(group['eps'])
                bias_correction1 = 1 - beta1 ** state['step']
                bias_correction2 = 1 - beta2 ** state['step']
                # Bias corrections folded into a single scalar step size.
                step_size = group['lr'] * math.sqrt(bias_correction2) / bias_correction1
                p.data.addcdiv_(exp_avg, denom, value=-step_size)
        return loss
class LAMB(Optimizer):
    """Layer-wise Adaptive Moments optimizer (LAMB, You et al., 2020).

    Adam-style update whose magnitude is rescaled per parameter tensor by the
    trust ratio ``||w|| / ||update||``, intended for large-batch training.
    """

    def __init__(self, params, lr=1e-3, betas=(0.9, 0.999), eps=1e-6, weight_decay=0.01):
        defaults = dict(lr=lr, betas=betas, eps=eps, weight_decay=weight_decay)
        super().__init__(params, defaults)

    def step(self, closure=None):
        """Perform a single optimization step; ``closure`` re-evaluates the loss."""
        loss = None
        if closure is not None:
            loss = closure()
        for group in self.param_groups:
            for p in group['params']:
                if p.grad is None:
                    continue
                grad = p.grad.data
                state = self.state[p]
                # Lazy per-parameter state initialization on first update.
                if len(state) == 0:
                    state['step'] = 0
                    state['exp_avg'] = torch.zeros_like(p.data)
                    state['exp_avg_sq'] = torch.zeros_like(p.data)
                exp_avg, exp_avg_sq = state['exp_avg'], state['exp_avg_sq']
                beta1, beta2 = group['betas']
                state['step'] += 1
                exp_avg.mul_(beta1).add_(grad, alpha=1 - beta1)
                exp_avg_sq.mul_(beta2).addcmul_(grad, grad, value=1 - beta2)
                bias_correction1 = 1 - beta1 ** state['step']
                bias_correction2 = 1 - beta2 ** state['step']
                # Bias-corrected first and second moments (Adam's m_hat / v_hat).
                exp_avg_hat = exp_avg / bias_correction1
                exp_avg_sq_hat = exp_avg_sq / bias_correction2
                update = exp_avg_hat / (exp_avg_sq_hat.sqrt() + group['eps'])
                # Weight decay is added to the update direction (LAMB formulation).
                update.add_(p.data, alpha=group['weight_decay'])
                weight_norm = p.data.norm()
                update_norm = update.norm()
                # Trust ratio scales the step to the layer's weight magnitude;
                # fall back to 1 when either norm is zero.
                if weight_norm > 0 and update_norm > 0:
                    trust_ratio = weight_norm / update_norm
                else:
                    trust_ratio = 1.0
                p.data.add_(update, alpha=-group['lr'] * trust_ratio)
        return loss
class RAdam(Optimizer):
    """Rectified Adam (Liu et al., 2020).

    Uses the variance-rectification term ``rt`` once the approximated SMA
    length ``rho_t`` exceeds 4, and falls back to an un-adapted (SGD-with-
    momentum-like) step before that.

    Fix: removed the ``buffered`` 10-entry list that the original allocated
    on every parameter update — it was never read (dead code and a pointless
    per-step allocation).
    """

    def __init__(self, params, lr=1e-3, betas=(0.9, 0.999), eps=1e-8, weight_decay=0):
        defaults = dict(lr=lr, betas=betas, eps=eps, weight_decay=weight_decay)
        super().__init__(params, defaults)

    def step(self, closure=None):
        """Perform a single optimization step; ``closure`` re-evaluates the loss."""
        loss = None
        if closure is not None:
            loss = closure()
        for group in self.param_groups:
            for p in group['params']:
                if p.grad is None:
                    continue
                grad = p.grad.data
                state = self.state[p]
                # Lazy per-parameter state initialization on first update.
                if len(state) == 0:
                    state['step'] = 0
                    state['exp_avg'] = torch.zeros_like(p.data)
                    state['exp_avg_sq'] = torch.zeros_like(p.data)
                exp_avg, exp_avg_sq = state['exp_avg'], state['exp_avg_sq']
                beta1, beta2 = group['betas']
                state['step'] += 1
                exp_avg.mul_(beta1).add_(grad, alpha=1 - beta1)
                exp_avg_sq.mul_(beta2).addcmul_(grad, grad, value=1 - beta2)
                # Length of the approximated simple moving average.
                rho_inf = 2 / (1 - beta2) - 1
                rho_t = rho_inf - 2 * state['step'] * (beta2 ** state['step']) / (1 - beta2 ** state['step'])
                if rho_t > 4:
                    # Variance is tractable: apply the rectified adaptive step.
                    bias_correction1 = 1 - beta1 ** state['step']
                    bias_correction2 = 1 - beta2 ** state['step']
                    rt = math.sqrt(
                        (rho_t - 4) * (rho_t - 2) * rho_inf / ((rho_inf - 4) * (rho_inf - 2) * rho_t)
                    )
                    denom = (exp_avg_sq.sqrt() / math.sqrt(bias_correction2)).add_(group['eps'])
                    step_size = group['lr'] * rt / bias_correction1
                    p.data.addcdiv_(exp_avg, denom, value=-step_size)
                else:
                    # Early steps: plain momentum update without adaptivity.
                    bias_correction1 = 1 - beta1 ** state['step']
                    step_size = group['lr'] / bias_correction1
                    p.data.add_(exp_avg, alpha=-step_size)
                if group['weight_decay'] != 0:
                    # Decoupled decay applied after the update.
                    p.data.add_(p.data, alpha=-group['weight_decay'] * group['lr'])
        return loss
class AdaBound(Optimizer):
    """AdaBound (Luo et al., 2019): Adam with dynamically bounded per-element
    step sizes that converge toward a final SGD-like learning rate.

    Fix: removed the no-op expression ``final_lr * lr / lr`` — the reference
    implementation scales ``final_lr`` by ``lr / base_lr``, but with a single
    constant lr that reduces exactly to ``final_lr``.
    """

    def __init__(self, params, lr=1e-3, betas=(0.9, 0.999), final_lr=0.1, gamma=1e-3, eps=1e-8, weight_decay=0):
        defaults = dict(lr=lr, betas=betas, final_lr=final_lr, gamma=gamma, eps=eps, weight_decay=weight_decay)
        super().__init__(params, defaults)

    def step(self, closure=None):
        """Perform a single optimization step; ``closure`` re-evaluates the loss."""
        loss = None
        if closure is not None:
            loss = closure()
        for group in self.param_groups:
            for p in group['params']:
                if p.grad is None:
                    continue
                grad = p.grad.data
                state = self.state[p]
                # Lazy per-parameter state initialization on first update.
                if len(state) == 0:
                    state['step'] = 0
                    state['exp_avg'] = torch.zeros_like(p.data)
                    state['exp_avg_sq'] = torch.zeros_like(p.data)
                exp_avg, exp_avg_sq = state['exp_avg'], state['exp_avg_sq']
                beta1, beta2 = group['betas']
                state['step'] += 1
                if group['weight_decay'] != 0:
                    # NOTE: classic L2 regularization, added into p.grad in place
                    # (same as the original implementation).
                    grad.add_(p.data, alpha=group['weight_decay'])
                exp_avg.mul_(beta1).add_(grad, alpha=1 - beta1)
                exp_avg_sq.mul_(beta2).addcmul_(grad, grad, value=1 - beta2)
                bias_correction1 = 1 - beta1 ** state['step']
                bias_correction2 = 1 - beta2 ** state['step']
                step_size = group['lr'] * math.sqrt(bias_correction2) / bias_correction1
                final_lr = group['final_lr']
                # Bounds tighten around final_lr as training progresses,
                # morphing the adaptive step into a fixed SGD step.
                lower_bound = final_lr * (1 - 1 / (group['gamma'] * state['step'] + 1))
                upper_bound = final_lr * (1 + 1 / (group['gamma'] * state['step']))
                denom = exp_avg_sq.sqrt().add_(group['eps'])
                step_size_clipped = torch.full_like(denom, step_size).div_(denom).clamp_(lower_bound, upper_bound).mul_(exp_avg)
                p.data.add_(step_size_clipped, alpha=-1)
        return loss
class Lookahead(Optimizer):
    """Lookahead optimizer wrapper (Zhang et al., 2019).

    Keeps a copy of "slow" weights and, every ``k`` inner-optimizer steps,
    moves them a fraction ``alpha`` toward the fast weights, then syncs the
    fast weights back to the slow ones.

    Bug fix: ``self.state`` was a plain empty dict, so the first
    ``update()`` raised ``KeyError`` when indexing a parameter; per-parameter
    entries are now created on demand with ``setdefault``.

    NOTE(review): ``Optimizer.__init__`` is deliberately not called here, so
    base-class facilities beyond the methods defined below are unavailable.
    """

    def __init__(self, optimizer, k=5, alpha=0.5):
        self.optimizer = optimizer
        self.k = k
        self.alpha = alpha
        # Share the inner optimizer's param groups; track steps per group.
        self.param_groups = self.optimizer.param_groups
        self.state = {}
        for group in self.param_groups:
            group['counter'] = 0

    def update(self, group):
        """Interpolate slow weights toward the fast ones and copy back."""
        for fast_p in group['params']:
            if fast_p.grad is None:
                continue
            # Create the per-parameter state entry on first use (bug fix).
            param_state = self.state.setdefault(fast_p, {})
            if 'slow_buffer' not in param_state:
                param_state['slow_buffer'] = torch.empty_like(fast_p.data)
                param_state['slow_buffer'].copy_(fast_p.data)
            slow = param_state['slow_buffer']
            slow.add_(fast_p.data - slow, alpha=self.alpha)
            fast_p.data.copy_(slow)

    def step(self, closure=None):
        """Step the inner optimizer; every ``k`` steps sync with slow weights."""
        loss = self.optimizer.step(closure)
        for group in self.param_groups:
            group['counter'] += 1
            if group['counter'] >= self.k:
                self.update(group)
                group['counter'] = 0
        return loss

    def state_dict(self):
        """Expose slow-weight state alongside the inner optimizer's state."""
        return {
            'state': self.state,
            'optimizer': self.optimizer.state_dict(),
            'param_groups': self.param_groups,
        }
| {
"repo_id": "geekcomputers/Python",
"file_path": "ML/src/python/neuralforge/optim/optimizers.py",
"license": "MIT License",
"lines": 200,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
geekcomputers/Python:ML/src/python/neuralforge/optim/schedulers.py | import torch
from torch.optim.lr_scheduler import _LRScheduler
import math
class WarmupScheduler(_LRScheduler):
    """Linear warmup wrapper; defers to ``base_scheduler`` afterwards.

    During the first ``warmup_epochs`` epochs the lr ramps linearly from
    ``base_lr / warmup_epochs`` up to ``base_lr``; after that, stepping is
    delegated to ``base_scheduler`` when one is provided.
    """

    def __init__(self, optimizer, warmup_epochs, base_scheduler=None, last_epoch=-1):
        self.warmup_epochs = warmup_epochs
        self.base_scheduler = base_scheduler
        super().__init__(optimizer, last_epoch)

    def get_lr(self):
        # Warmup phase: fraction (epoch + 1) / warmup_epochs of each base lr.
        if self.last_epoch < self.warmup_epochs:
            return [base_lr * (self.last_epoch + 1) / self.warmup_epochs for base_lr in self.base_lrs]
        # Past warmup: report whatever the wrapped scheduler last decided.
        if self.base_scheduler is not None:
            return self.base_scheduler.get_last_lr()
        return self.base_lrs

    def step(self, epoch=None):
        # Drive the built-in machinery during warmup; afterwards hand the
        # stepping over to the wrapped scheduler (if any).
        if self.last_epoch < self.warmup_epochs:
            super().step(epoch)
        elif self.base_scheduler is not None:
            self.base_scheduler.step(epoch)
class CosineAnnealingWarmRestarts(_LRScheduler):
    """Cosine annealing with warm restarts (SGDR, Loshchilov & Hutter).

    ``T_cur`` counts epochs since the last restart and ``T_i`` is the length
    of the current cycle; each restart multiplies the cycle length by
    ``T_mult``.
    """

    def __init__(self, optimizer, T_0, T_mult=1, eta_min=0, last_epoch=-1):
        self.T_0 = T_0
        self.T_mult = T_mult
        self.eta_min = eta_min
        self.T_cur = last_epoch
        self.T_i = T_0
        super().__init__(optimizer, last_epoch)

    def get_lr(self):
        # Cosine from base_lr down to eta_min over the current cycle.
        return [
            self.eta_min + (base_lr - self.eta_min) * (1 + math.cos(math.pi * self.T_cur / self.T_i)) / 2
            for base_lr in self.base_lrs
        ]

    def step(self, epoch=None):
        if epoch is None:
            # Sequential stepping: advance one epoch, restart when the cycle ends.
            epoch = self.last_epoch + 1
            self.T_cur = self.T_cur + 1
            if self.T_cur >= self.T_i:
                self.T_cur = self.T_cur - self.T_i
                self.T_i = self.T_i * self.T_mult
        else:
            # Random-access stepping: solve for the cycle containing ``epoch``.
            if epoch < 0:
                raise ValueError("Expected non-negative epoch, but got {}".format(epoch))
            if epoch >= self.T_0:
                if self.T_mult == 1:
                    self.T_cur = epoch % self.T_0
                else:
                    # n = number of completed cycles (geometric-series inversion).
                    n = int(math.log((epoch / self.T_0 * (self.T_mult - 1) + 1), self.T_mult))
                    self.T_cur = epoch - self.T_0 * (self.T_mult ** n - 1) / (self.T_mult - 1)
                    self.T_i = self.T_0 * self.T_mult ** n
            else:
                self.T_i = self.T_0
                self.T_cur = epoch
        self.last_epoch = math.floor(epoch)
        # Write the freshly computed lrs straight into the optimizer.
        for param_group, lr in zip(self.optimizer.param_groups, self.get_lr()):
            param_group['lr'] = lr
class OneCycleLR(_LRScheduler):
    """One-cycle policy (Smith, 2018): linear ramp up, then anneal down.

    The lr starts at ``max_lr / div_factor``, rises linearly to ``max_lr``
    over the first ``pct_start`` fraction of ``total_steps``, then anneals
    (cosine or linear) down to ``max_lr / final_div_factor``.
    """

    def __init__(self, optimizer, max_lr, total_steps, pct_start=0.3, anneal_strategy='cos',
                 div_factor=25.0, final_div_factor=1e4, last_epoch=-1):
        # Accept either a scalar max_lr or one value per param group.
        self.max_lr = max_lr if isinstance(max_lr, list) else [max_lr] * len(optimizer.param_groups)
        self.total_steps = total_steps
        self.pct_start = pct_start
        self.anneal_strategy = anneal_strategy
        self.div_factor = div_factor
        self.final_div_factor = final_div_factor
        self.initial_lr = [lr / self.div_factor for lr in self.max_lr]
        self.min_lr = [lr / self.final_div_factor for lr in self.max_lr]
        super().__init__(optimizer, last_epoch)

    def get_lr(self):
        step_num = self.last_epoch
        # Past the schedule: clamp at the floor lr.
        if step_num > self.total_steps:
            return self.min_lr
        if step_num <= self.pct_start * self.total_steps:
            # Warmup phase: linear from initial_lr up to max_lr.
            pct = step_num / (self.pct_start * self.total_steps)
            return [initial + (maximum - initial) * pct
                    for initial, maximum in zip(self.initial_lr, self.max_lr)]
        else:
            # Annealing phase: cosine ('cos') or linear down to min_lr.
            pct = (step_num - self.pct_start * self.total_steps) / ((1 - self.pct_start) * self.total_steps)
            if self.anneal_strategy == 'cos':
                return [minimum + (maximum - minimum) * (1 + math.cos(math.pi * pct)) / 2
                        for minimum, maximum in zip(self.min_lr, self.max_lr)]
            else:
                return [maximum - (maximum - minimum) * pct
                        for minimum, maximum in zip(self.min_lr, self.max_lr)]
class PolynomialLR(_LRScheduler):
    """Polynomial learning-rate decay reaching zero at ``total_iters``."""

    def __init__(self, optimizer, total_iters, power=1.0, last_epoch=-1):
        self.total_iters = total_iters
        self.power = power
        super().__init__(optimizer, last_epoch)

    def get_lr(self):
        epoch = self.last_epoch
        # Outside the decay window the current lr is kept unchanged.
        if epoch == 0 or epoch > self.total_iters:
            return [group['lr'] for group in self.optimizer.param_groups]
        # Multiplicative factor relative to the previous epoch's value.
        remaining = 1.0 - epoch / self.total_iters
        previous = 1.0 - (epoch - 1) / self.total_iters
        factor = (remaining / previous) ** self.power
        return [group['lr'] * factor for group in self.optimizer.param_groups]
class LinearWarmupCosineAnnealingLR(_LRScheduler):
    """Linear warmup to the base lr, then cosine annealing to ``eta_min``."""

    def __init__(self, optimizer, warmup_epochs, max_epochs, warmup_start_lr=0.0, eta_min=0.0, last_epoch=-1):
        self.warmup_epochs = warmup_epochs
        self.max_epochs = max_epochs
        self.warmup_start_lr = warmup_start_lr
        self.eta_min = eta_min
        super().__init__(optimizer, last_epoch)

    def get_lr(self):
        epoch = self.last_epoch
        if epoch < self.warmup_epochs:
            # Linear ramp from warmup_start_lr up to each base lr.
            frac = epoch / self.warmup_epochs
            return [self.warmup_start_lr + (lr - self.warmup_start_lr) * frac
                    for lr in self.base_lrs]
        # Cosine decay over the remaining epochs down to eta_min.
        progress = (epoch - self.warmup_epochs) / (self.max_epochs - self.warmup_epochs)
        scale = 0.5 * (1.0 + math.cos(math.pi * progress))
        return [self.eta_min + (lr - self.eta_min) * scale for lr in self.base_lrs]
class ExponentialWarmup(_LRScheduler):
    """Linear warmup followed by exponential decay with factor ``gamma``."""

    def __init__(self, optimizer, warmup_epochs, gamma=0.9, last_epoch=-1):
        self.warmup_epochs = warmup_epochs
        self.gamma = gamma
        super().__init__(optimizer, last_epoch)

    def get_lr(self):
        # Warmup: linear ramp of each base lr over the first warmup_epochs.
        if self.last_epoch < self.warmup_epochs:
            return [base_lr * (self.last_epoch + 1) / self.warmup_epochs for base_lr in self.base_lrs]
        # Afterwards: exponential decay from the base lr.
        return [base_lr * self.gamma ** (self.last_epoch - self.warmup_epochs) for base_lr in self.base_lrs] | {
"repo_id": "geekcomputers/Python",
"file_path": "ML/src/python/neuralforge/optim/schedulers.py",
"license": "MIT License",
"lines": 117,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
geekcomputers/Python:ML/src/python/neuralforge/trainer.py | import torch
import torch.nn as nn
import torch.amp as amp
from torch.utils.data import DataLoader
from typing import Optional, Dict, Any, Callable
import time
import os
from tqdm import tqdm
from .utils.logger import Logger
from .utils.metrics import MetricsTracker
from .config import Config
class Trainer:
    """Classification training loop with AMP, clipping, checkpoints, logging.

    Expects loaders yielding ``(inputs, targets)`` batches and a criterion
    scoring class logits, since accuracy is read off ``outputs.max(1)``.
    """

    def __init__(
        self,
        model: nn.Module,
        train_loader: DataLoader,
        val_loader: Optional[DataLoader],
        optimizer: torch.optim.Optimizer,
        criterion: nn.Module,
        config: Config,
        scheduler: Optional[Any] = None,
        device: Optional[str] = None
    ):
        self.model = model
        self.train_loader = train_loader
        self.val_loader = val_loader
        self.optimizer = optimizer
        self.criterion = criterion
        self.config = config
        self.scheduler = scheduler
        # An explicit device argument overrides the config default.
        self.device = device or config.device
        self.model.to(self.device)
        # Mixed precision only when requested AND actually running on CUDA.
        self.scaler = amp.GradScaler('cuda') if config.use_amp and self.device == 'cuda' else None
        self.logger = Logger(config.log_dir, config.model_name)
        self.metrics = MetricsTracker()
        self.current_epoch = 0
        self.global_step = 0
        self.best_val_loss = float('inf')
        os.makedirs(config.model_dir, exist_ok=True)
        self.logger.info(f"Trainer initialized with device: {self.device}")
        self.logger.info(f"Model parameters: {sum(p.numel() for p in model.parameters()):,}")
        self.logger.info(f"Trainable parameters: {sum(p.numel() for p in model.parameters() if p.requires_grad):,}")

    def train_epoch(self) -> Dict[str, float]:
        """Run one training epoch; return average loss and accuracy (%)."""
        self.model.train()
        epoch_loss = 0.0
        correct = 0
        total = 0
        pbar = tqdm(self.train_loader, desc=f"Epoch {self.current_epoch + 1}/{self.config.epochs}")
        for batch_idx, (inputs, targets) in enumerate(pbar):
            inputs = inputs.to(self.device, non_blocking=True)
            targets = targets.to(self.device, non_blocking=True)
            self.optimizer.zero_grad(set_to_none=True)
            if self.scaler is not None:
                # AMP path: autocast forward, scaled backward, unscale before clipping.
                with amp.autocast('cuda'):
                    outputs = self.model(inputs)
                    loss = self.criterion(outputs, targets)
                self.scaler.scale(loss).backward()
                if self.config.grad_clip > 0:
                    self.scaler.unscale_(self.optimizer)
                    torch.nn.utils.clip_grad_norm_(self.model.parameters(), self.config.grad_clip)
                self.scaler.step(self.optimizer)
                self.scaler.update()
            else:
                # Full-precision path.
                outputs = self.model(inputs)
                loss = self.criterion(outputs, targets)
                loss.backward()
                if self.config.grad_clip > 0:
                    torch.nn.utils.clip_grad_norm_(self.model.parameters(), self.config.grad_clip)
                self.optimizer.step()
            epoch_loss += loss.item()
            _, predicted = outputs.max(1)
            total += targets.size(0)
            correct += predicted.eq(targets).sum().item()
            self.global_step += 1
            # Refresh the progress bar only every 10 batches to limit overhead.
            if batch_idx % 10 == 0:
                pbar.set_postfix({
                    'loss': f'{loss.item():.4f}',
                    'acc': f'{100. * correct / total:.2f}%'
                })
        avg_loss = epoch_loss / len(self.train_loader)
        accuracy = 100. * correct / total
        return {'loss': avg_loss, 'accuracy': accuracy}

    def validate(self) -> Dict[str, float]:
        """Evaluate on the validation set; empty dict when no loader is set."""
        if self.val_loader is None:
            return {}
        self.model.eval()
        val_loss = 0.0
        correct = 0
        total = 0
        with torch.no_grad():
            for inputs, targets in tqdm(self.val_loader, desc="Validation"):
                inputs = inputs.to(self.device, non_blocking=True)
                targets = targets.to(self.device, non_blocking=True)
                if self.scaler is not None:
                    # Keep autocast in eval too so numerics match training.
                    with amp.autocast('cuda'):
                        outputs = self.model(inputs)
                        loss = self.criterion(outputs, targets)
                else:
                    outputs = self.model(inputs)
                    loss = self.criterion(outputs, targets)
                val_loss += loss.item()
                _, predicted = outputs.max(1)
                total += targets.size(0)
                correct += predicted.eq(targets).sum().item()
        avg_loss = val_loss / len(self.val_loader)
        accuracy = 100. * correct / total
        return {'loss': avg_loss, 'accuracy': accuracy}

    def train(self):
        """Full training run: epochs, scheduling, checkpoints, metrics dump."""
        self.logger.info("Starting training...")
        start_time = time.time()
        for epoch in range(self.config.epochs):
            self.current_epoch = epoch
            epoch_start = time.time()
            train_metrics = self.train_epoch()
            val_metrics = self.validate()
            if self.scheduler is not None:
                # ReduceLROnPlateau needs the monitored value; others step blindly.
                if isinstance(self.scheduler, torch.optim.lr_scheduler.ReduceLROnPlateau):
                    self.scheduler.step(val_metrics.get('loss', train_metrics['loss']))
                else:
                    self.scheduler.step()
            current_lr = self.optimizer.param_groups[0]['lr']
            epoch_time = time.time() - epoch_start
            self.logger.info(
                f"Epoch {epoch + 1}/{self.config.epochs} | "
                f"Train Loss: {train_metrics['loss']:.4f} | "
                f"Train Acc: {train_metrics['accuracy']:.2f}% | "
                f"Val Loss: {val_metrics.get('loss', 0):.4f} | "
                f"Val Acc: {val_metrics.get('accuracy', 0):.2f}% | "
                f"LR: {current_lr:.6f} | "
                f"Time: {epoch_time:.2f}s"
            )
            self.metrics.update({
                'epoch': epoch + 1,
                'train_loss': train_metrics['loss'],
                'train_acc': train_metrics['accuracy'],
                'val_loss': val_metrics.get('loss', 0),
                'val_acc': val_metrics.get('accuracy', 0),
                'lr': current_lr,
                'time': epoch_time
            })
            # Periodic checkpoint plus best-model tracking on validation loss.
            if (epoch + 1) % self.config.checkpoint_freq == 0:
                self.save_checkpoint(f'checkpoint_epoch_{epoch + 1}.pt')
            if val_metrics and val_metrics['loss'] < self.best_val_loss:
                self.best_val_loss = val_metrics['loss']
                self.save_checkpoint('best_model.pt')
                self.logger.info(f"New best model saved with val_loss: {self.best_val_loss:.4f}")
        total_time = time.time() - start_time
        self.logger.info(f"Training completed in {total_time / 3600:.2f} hours")
        self.save_checkpoint('final_model.pt')
        self.metrics.save(os.path.join(self.config.log_dir, 'metrics.json'))

    def save_checkpoint(self, filename: str):
        """Serialize model/optimizer (plus scheduler/scaler if present) state."""
        checkpoint_path = os.path.join(self.config.model_dir, filename)
        checkpoint = {
            'epoch': self.current_epoch,
            'global_step': self.global_step,
            'model_state_dict': self.model.state_dict(),
            'optimizer_state_dict': self.optimizer.state_dict(),
            'best_val_loss': self.best_val_loss,
            'config': self.config,
        }
        if self.scheduler is not None:
            checkpoint['scheduler_state_dict'] = self.scheduler.state_dict()
        if self.scaler is not None:
            checkpoint['scaler_state_dict'] = self.scaler.state_dict()
        torch.save(checkpoint, checkpoint_path)
        self.logger.info(f"Checkpoint saved: {checkpoint_path}")

    def load_checkpoint(self, checkpoint_path: str):
        """Restore a checkpoint produced by ``save_checkpoint``."""
        self.logger.info(f"Loading checkpoint: {checkpoint_path}")
        checkpoint = torch.load(checkpoint_path, map_location=self.device)
        self.model.load_state_dict(checkpoint['model_state_dict'])
        self.optimizer.load_state_dict(checkpoint['optimizer_state_dict'])
        self.current_epoch = checkpoint['epoch']
        self.global_step = checkpoint['global_step']
        self.best_val_loss = checkpoint['best_val_loss']
        # Scheduler/scaler state is restored only when both sides have it.
        if self.scheduler is not None and 'scheduler_state_dict' in checkpoint:
            self.scheduler.load_state_dict(checkpoint['scheduler_state_dict'])
        if self.scaler is not None and 'scaler_state_dict' in checkpoint:
            self.scaler.load_state_dict(checkpoint['scaler_state_dict'])
        self.logger.info(f"Checkpoint loaded from epoch {self.current_epoch}")

    def test(self, test_loader: DataLoader) -> Dict[str, float]:
        """Evaluate on a held-out loader; return loss and accuracy (%)."""
        self.logger.info("Starting testing...")
        self.model.eval()
        test_loss = 0.0
        correct = 0
        total = 0
        with torch.no_grad():
            for inputs, targets in tqdm(test_loader, desc="Testing"):
                inputs = inputs.to(self.device, non_blocking=True)
                targets = targets.to(self.device, non_blocking=True)
                outputs = self.model(inputs)
                loss = self.criterion(outputs, targets)
                test_loss += loss.item()
                _, predicted = outputs.max(1)
                total += targets.size(0)
                correct += predicted.eq(targets).sum().item()
        avg_loss = test_loss / len(test_loader)
        accuracy = 100. * correct / total
        self.logger.info(f"Test Loss: {avg_loss:.4f} | Test Acc: {accuracy:.2f}%")
        return {'loss': avg_loss, 'accuracy': accuracy} | {
"repo_id": "geekcomputers/Python",
"file_path": "ML/src/python/neuralforge/trainer.py",
"license": "MIT License",
"lines": 199,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
geekcomputers/Python:ML/src/python/neuralforge/utils/logger.py | import os
import sys
import logging
from datetime import datetime
from typing import Optional
class Logger:
    """File + console logger with a timestamped log file per run."""

    def __init__(self, log_dir: str, name: str = "neuralforge"):
        self.log_dir = log_dir
        self.name = name
        os.makedirs(log_dir, exist_ok=True)
        stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        log_file = os.path.join(log_dir, f"{name}_{stamp}.log")
        self.logger = logging.getLogger(name)
        self.logger.setLevel(logging.INFO)
        # Drop stale handlers so repeated construction does not duplicate output.
        if self.logger.hasHandlers():
            self.logger.handlers.clear()
        fmt = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s',
            datefmt='%Y-%m-%d %H:%M:%S'
        )
        # File handler first, then stdout handler (same order as before).
        for handler in (logging.FileHandler(log_file), logging.StreamHandler(sys.stdout)):
            handler.setLevel(logging.INFO)
            handler.setFormatter(fmt)
            self.logger.addHandler(handler)
        self.info(f"Logger initialized. Logging to: {log_file}")

    def info(self, message: str):
        self.logger.info(message)

    def warning(self, message: str):
        self.logger.warning(message)

    def error(self, message: str):
        self.logger.error(message)

    def debug(self, message: str):
        self.logger.debug(message)

    def log_metrics(self, metrics: dict, step: Optional[int] = None):
        """Log a dict of metrics on one line, optionally tagged with a step."""
        prefix = f"Step {step}: " if step is not None else "Metrics: "
        parts = []
        for key, value in metrics.items():
            parts.append(f"{key}={value:.4f}" if isinstance(value, float) else f"{key}={value}")
        self.info(prefix + ", ".join(parts))

    def log_model_summary(self, model):
        """Log total / trainable / frozen parameter counts for ``model``."""
        total_params = sum(p.numel() for p in model.parameters())
        trainable_params = sum(p.numel() for p in model.parameters() if p.requires_grad)
        bar = "=" * 50
        self.info(bar)
        self.info("Model Summary")
        self.info(bar)
        self.info(f"Total parameters: {total_params:,}")
        self.info(f"Trainable parameters: {trainable_params:,}")
        self.info(f"Non-trainable parameters: {total_params - trainable_params:,}")
        self.info(bar)

    def separator(self, char: str = "=", length: int = 80):
        """Log a horizontal rule."""
        self.info(char * length)
class TensorBoardLogger:
    """Thin optional wrapper around ``torch.utils.tensorboard.SummaryWriter``.

    When tensorboard is not installed every logging method becomes a silent
    no-op, so callers never need to guard their calls.
    """

    def __init__(self, log_dir: str):
        self.log_dir = log_dir
        try:
            from torch.utils.tensorboard import SummaryWriter
            self.writer = SummaryWriter(log_dir)
            self.enabled = True
        except ImportError:
            # Degrade gracefully: keep the API surface but disable writes.
            print("TensorBoard not available. Skipping TensorBoard logging.")
            self.enabled = False

    def log_scalar(self, tag: str, value: float, step: int):
        """Log a single scalar under ``tag``."""
        if self.enabled:
            self.writer.add_scalar(tag, value, step)

    def log_scalars(self, main_tag: str, tag_scalar_dict: dict, step: int):
        """Log several related scalars grouped under ``main_tag``."""
        if self.enabled:
            self.writer.add_scalars(main_tag, tag_scalar_dict, step)

    def log_histogram(self, tag: str, values, step: int):
        """Log a histogram of ``values``."""
        if self.enabled:
            self.writer.add_histogram(tag, values, step)

    def log_image(self, tag: str, img_tensor, step: int):
        """Log an image tensor."""
        if self.enabled:
            self.writer.add_image(tag, img_tensor, step)

    def log_graph(self, model, input_to_model):
        """Log the model graph traced with ``input_to_model``."""
        if self.enabled:
            self.writer.add_graph(model, input_to_model)

    def close(self):
        """Flush and close the underlying writer."""
        if self.enabled:
            self.writer.close()
| {
"repo_id": "geekcomputers/Python",
"file_path": "ML/src/python/neuralforge/utils/logger.py",
"license": "MIT License",
"lines": 86,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
geekcomputers/Python:ML/src/python/neuralforge/utils/metrics.py | import json
import os
from typing import Dict, List, Any
import numpy as np
class MetricsTracker:
    """Accumulates per-epoch metric dicts and tracks best values.

    'Best' means the minimum for keys containing 'loss' and the maximum
    otherwise; only int/float values participate in best tracking.
    """

    def __init__(self):
        self.metrics = []
        self.best_metrics = {}

    def update(self, metrics: Dict[str, Any]):
        """Append a snapshot of ``metrics`` and refresh per-key bests."""
        self.metrics.append(metrics.copy())
        for key, value in metrics.items():
            if not isinstance(value, (int, float)):
                continue
            if key not in self.best_metrics:
                self.best_metrics[key] = value
            elif 'loss' in key.lower():
                self.best_metrics[key] = min(self.best_metrics[key], value)
            else:
                self.best_metrics[key] = max(self.best_metrics[key], value)

    def get_history(self, key: str) -> List[Any]:
        """All recorded values for ``key``, in insertion order."""
        return [entry[key] for entry in self.metrics if key in entry]

    def get_latest(self, key: str) -> Any:
        """Most recent value for ``key``, or None if never recorded."""
        for entry in reversed(self.metrics):
            if key in entry:
                return entry[key]
        return None

    def get_best(self, key: str) -> Any:
        """Best recorded value for ``key`` (min for losses, max otherwise)."""
        return self.best_metrics.get(key)

    def get_average(self, key: str, last_n: int = None) -> float:
        """Mean of the (optionally last ``last_n``) non-None values for ``key``."""
        history = self.get_history(key)
        if not history:
            return 0.0
        if last_n is not None:
            history = history[-last_n:]
        return np.mean([v for v in history if v is not None])

    def save(self, filepath: str):
        """Persist history and best values as JSON."""
        os.makedirs(os.path.dirname(filepath), exist_ok=True)
        payload = {
            'metrics': self.metrics,
            'best_metrics': self.best_metrics
        }
        with open(filepath, 'w') as f:
            json.dump(payload, f, indent=2)

    def load(self, filepath: str):
        """Restore history and best values from a JSON file written by save()."""
        with open(filepath, 'r') as f:
            payload = json.load(f)
        self.metrics = payload.get('metrics', [])
        self.best_metrics = payload.get('best_metrics', {})

    def summary(self) -> str:
        """Human-readable best/latest table for every tracked key."""
        divider = "=" * 50
        lines = [divider, "Metrics Summary", divider]
        for key, value in self.best_metrics.items():
            latest = self.get_latest(key)
            if isinstance(value, float):
                lines.append(f"{key}: best={value:.4f}, latest={latest:.4f}")
            else:
                lines.append(f"{key}: best={value}, latest={latest}")
        lines.append(divider)
        return "\n".join(lines)
class AverageMeter:
    """Tracks the latest value plus a running (count-weighted) average."""

    def __init__(self):
        self.reset()

    def reset(self):
        """Clear all accumulated statistics."""
        self.val = 0
        self.avg = 0
        self.sum = 0
        self.count = 0

    def update(self, val, n=1):
        """Record ``val`` observed ``n`` times and refresh the average."""
        self.val = val
        self.sum = self.sum + val * n
        self.count = self.count + n
        self.avg = self.sum / self.count if self.count > 0 else 0
class EarlyStopping:
    """Signals stopping after ``patience`` evaluations without improvement.

    ``mode='min'`` treats lower scores as better (e.g. loss); ``'max'`` the
    opposite. ``min_delta`` is the minimum change that counts as improvement.
    """

    def __init__(self, patience: int = 10, min_delta: float = 0.0, mode: str = 'min'):
        self.patience = patience
        self.min_delta = min_delta
        self.mode = mode
        self.counter = 0
        self.best_score = None
        self.early_stop = False

    def __call__(self, score: float) -> bool:
        """Record one score; return True once stopping is triggered."""
        # First observation just establishes the baseline.
        if self.best_score is None:
            self.best_score = score
            return False
        if self.mode == 'min':
            has_improved = score < (self.best_score - self.min_delta)
        else:
            has_improved = score > (self.best_score + self.min_delta)
        if has_improved:
            self.best_score = score
            self.counter = 0
            return self.early_stop
        self.counter += 1
        if self.counter >= self.patience:
            self.early_stop = True
        return self.early_stop
class ConfusionMatrix:
    """Accumulates an NxN confusion matrix (row = true class, column = predicted class)."""

    def __init__(self, num_classes: int):
        self.num_classes = num_classes
        self.matrix = np.zeros((num_classes, num_classes), dtype=np.int64)

    def update(self, predictions: np.ndarray, targets: np.ndarray):
        """Add one batch of (prediction, target) label pairs to the counts."""
        for guess, truth in zip(predictions, targets):
            self.matrix[truth, guess] += 1

    def reset(self):
        """Zero out all accumulated counts."""
        self.matrix = np.zeros((self.num_classes, self.num_classes), dtype=np.int64)

    def compute_metrics(self) -> Dict[str, float]:
        """Return accuracy plus macro-averaged precision/recall/F1 (epsilon-stabilized)."""
        diag = np.diag(self.matrix)                    # true positives per class
        false_pos = np.sum(self.matrix, axis=0) - diag
        false_neg = np.sum(self.matrix, axis=1) - diag
        tn = np.sum(self.matrix) - (diag + false_pos + false_neg)
        total = np.sum(self.matrix)
        accuracy = np.sum(diag) / total if total > 0 else 0.0
        precision = diag / (diag + false_pos + 1e-10)
        recall = diag / (diag + false_neg + 1e-10)
        f1 = 2 * (precision * recall) / (precision + recall + 1e-10)
        return {
            'accuracy': accuracy,
            'precision': np.mean(precision),
            'recall': np.mean(recall),
            'f1_score': np.mean(f1)
        }

    def get_matrix(self) -> np.ndarray:
        """Return the raw count matrix."""
        return self.matrix
def accuracy(predictions, targets):
    """Return the percentage of positions where *predictions* equals *targets* (0.0 when empty)."""
    total = len(targets)
    if total == 0:
        return 0.0
    matches = (predictions == targets).sum()
    return 100.0 * matches / total
def top_k_accuracy(output, target, k=5):
    """Return the top-k accuracy (in percent) of *output* scores against *target* labels.

    Args:
        output: (batch, num_classes) score/logit tensor.
        target: (batch,) ground-truth class indices.
        k: number of ranked predictions to consider (clamped to num_classes).
    """
    # Local import: this module only imports typing/numpy at top level, so
    # the original raised NameError on first call.
    import torch
    with torch.no_grad():
        maxk = min(k, output.size(1))
        _, pred = output.topk(maxk, 1, True, True)
        pred = pred.t()  # (maxk, batch): one row per ranked guess
        correct = pred.eq(target.view(1, -1).expand_as(pred))
        # Slice by maxk (not k): explicit about the clamped rank count.
        correct_k = correct[:maxk].reshape(-1).float().sum(0, keepdim=True)
        return correct_k.mul_(100.0 / target.size(0)).item()
| {
"repo_id": "geekcomputers/Python",
"file_path": "ML/src/python/neuralforge/utils/metrics.py",
"license": "MIT License",
"lines": 133,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
geekcomputers/Python:ML/src/python/neuralforge/utils/visualization.py | import matplotlib.pyplot as plt
import numpy as np
import os
from typing import List, Dict, Optional
def plot_training_curves(
    metrics_tracker,
    save_path: Optional[str] = None,
    figsize: tuple = (15, 5)
):
    """Plot train/val loss and accuracy curves side by side, optionally saving to disk.

    Args:
        metrics_tracker: object exposing get_history(key) -> list of values.
        save_path: if given, the figure is written there (parent dirs created).
        figsize: matplotlib figure size for the 1x2 grid.
    """
    train_loss = metrics_tracker.get_history('train_loss')
    val_loss = metrics_tracker.get_history('val_loss')
    train_acc = metrics_tracker.get_history('train_acc')
    val_acc = metrics_tracker.get_history('val_acc')
    fig, axes = plt.subplots(1, 2, figsize=figsize)
    if train_loss:
        axes[0].plot(train_loss, label='Train Loss', linewidth=2)
    if val_loss:
        axes[0].plot(val_loss, label='Val Loss', linewidth=2)
    axes[0].set_xlabel('Epoch')
    axes[0].set_ylabel('Loss')
    axes[0].set_title('Training and Validation Loss')
    axes[0].legend()
    axes[0].grid(True, alpha=0.3)
    if train_acc:
        axes[1].plot(train_acc, label='Train Accuracy', linewidth=2)
    if val_acc:
        axes[1].plot(val_acc, label='Val Accuracy', linewidth=2)
    axes[1].set_xlabel('Epoch')
    axes[1].set_ylabel('Accuracy (%)')
    axes[1].set_title('Training and Validation Accuracy')
    axes[1].legend()
    axes[1].grid(True, alpha=0.3)
    plt.tight_layout()
    if save_path:
        # os.makedirs('') raises FileNotFoundError for bare filenames, so guard.
        out_dir = os.path.dirname(save_path)
        if out_dir:
            os.makedirs(out_dir, exist_ok=True)
        plt.savefig(save_path, dpi=300, bbox_inches='tight')
        print(f"Training curves saved to {save_path}")
    plt.close()
def plot_learning_rate(
    lr_history: List[float],
    save_path: Optional[str] = None,
    figsize: tuple = (10, 5)
):
    """Plot the learning-rate schedule with a log-scaled y axis, optionally saving to disk.

    Args:
        lr_history: per-step learning-rate values.
        save_path: if given, the figure is written there (parent dirs created).
        figsize: matplotlib figure size.
    """
    plt.figure(figsize=figsize)
    plt.plot(lr_history, linewidth=2)
    plt.xlabel('Step')
    plt.ylabel('Learning Rate')
    plt.title('Learning Rate Schedule')
    plt.grid(True, alpha=0.3)
    plt.yscale('log')
    if save_path:
        # os.makedirs('') raises FileNotFoundError for bare filenames, so guard.
        out_dir = os.path.dirname(save_path)
        if out_dir:
            os.makedirs(out_dir, exist_ok=True)
        plt.savefig(save_path, dpi=300, bbox_inches='tight')
        print(f"Learning rate plot saved to {save_path}")
    plt.close()
def plot_confusion_matrix(
    cm: np.ndarray,
    class_names: Optional[List[str]] = None,
    save_path: Optional[str] = None,
    figsize: tuple = (10, 8)
):
    """Render a confusion matrix as a heatmap with per-cell counts, optionally saving to disk.

    Args:
        cm: (num_classes, num_classes) integer count matrix (rows = truth).
        class_names: optional tick labels for both axes.
        save_path: if given, the figure is written there (parent dirs created).
        figsize: matplotlib figure size.
    """
    plt.figure(figsize=figsize)
    plt.imshow(cm, interpolation='nearest', cmap=plt.cm.Blues)
    plt.title('Confusion Matrix')
    plt.colorbar()
    if class_names:
        tick_marks = np.arange(len(class_names))
        plt.xticks(tick_marks, class_names, rotation=45)
        plt.yticks(tick_marks, class_names)
    # Flip the text color at half the max count so labels stay readable.
    thresh = cm.max() / 2.0
    for i in range(cm.shape[0]):
        for j in range(cm.shape[1]):
            plt.text(j, i, format(cm[i, j], 'd'),
                     ha="center", va="center",
                     color="white" if cm[i, j] > thresh else "black")
    plt.ylabel('True label')
    plt.xlabel('Predicted label')
    plt.tight_layout()
    if save_path:
        # os.makedirs('') raises FileNotFoundError for bare filenames, so guard.
        out_dir = os.path.dirname(save_path)
        if out_dir:
            os.makedirs(out_dir, exist_ok=True)
        plt.savefig(save_path, dpi=300, bbox_inches='tight')
        print(f"Confusion matrix saved to {save_path}")
    plt.close()
def visualize_architecture(architecture, save_path: Optional[str] = None):
    """Bar-plot how many layers of each type an evolved architecture contains.

    Args:
        architecture: object whose `genome` is a list of layer-gene dicts;
            each gene's type is read via gene.get('type', 'unknown').
        save_path: if given, the figure is written there (parent dirs created).
    """
    layer_types = [gene.get('type', 'unknown') for gene in architecture.genome]
    layer_counts = {}
    for layer_type in layer_types:
        layer_counts[layer_type] = layer_counts.get(layer_type, 0) + 1
    plt.figure(figsize=(10, 6))
    plt.bar(layer_counts.keys(), layer_counts.values())
    plt.xlabel('Layer Type')
    plt.ylabel('Count')
    plt.title('Architecture Layer Distribution')
    plt.xticks(rotation=45)
    plt.grid(True, alpha=0.3, axis='y')
    plt.tight_layout()
    if save_path:
        # os.makedirs('') raises FileNotFoundError for bare filenames, so guard.
        out_dir = os.path.dirname(save_path)
        if out_dir:
            os.makedirs(out_dir, exist_ok=True)
        plt.savefig(save_path, dpi=300, bbox_inches='tight')
        print(f"Architecture visualization saved to {save_path}")
    plt.close()
def plot_nas_history(
    history: List[Dict],
    save_path: Optional[str] = None,
    figsize: tuple = (15, 5)
):
    """Plot best/average fitness and accuracy per NAS generation, optionally saving to disk.

    Args:
        history: per-generation dicts with keys 'generation', 'best_fitness',
            'avg_fitness', 'best_accuracy', 'avg_accuracy'.
        save_path: if given, the figure is written there (parent dirs created).
        figsize: matplotlib figure size for the 1x2 grid.
    """
    generations = [h['generation'] for h in history]
    best_fitness = [h['best_fitness'] for h in history]
    avg_fitness = [h['avg_fitness'] for h in history]
    best_accuracy = [h['best_accuracy'] for h in history]
    avg_accuracy = [h['avg_accuracy'] for h in history]
    fig, axes = plt.subplots(1, 2, figsize=figsize)
    axes[0].plot(generations, best_fitness, label='Best Fitness', linewidth=2, marker='o')
    axes[0].plot(generations, avg_fitness, label='Avg Fitness', linewidth=2, marker='s')
    axes[0].set_xlabel('Generation')
    axes[0].set_ylabel('Fitness')
    axes[0].set_title('NAS Fitness Evolution')
    axes[0].legend()
    axes[0].grid(True, alpha=0.3)
    axes[1].plot(generations, best_accuracy, label='Best Accuracy', linewidth=2, marker='o')
    axes[1].plot(generations, avg_accuracy, label='Avg Accuracy', linewidth=2, marker='s')
    axes[1].set_xlabel('Generation')
    axes[1].set_ylabel('Accuracy (%)')
    axes[1].set_title('NAS Accuracy Evolution')
    axes[1].legend()
    axes[1].grid(True, alpha=0.3)
    plt.tight_layout()
    if save_path:
        # os.makedirs('') raises FileNotFoundError for bare filenames, so guard.
        out_dir = os.path.dirname(save_path)
        if out_dir:
            os.makedirs(out_dir, exist_ok=True)
        plt.savefig(save_path, dpi=300, bbox_inches='tight')
        print(f"NAS history plot saved to {save_path}")
    plt.close()
def plot_gradient_flow(named_parameters, save_path: Optional[str] = None):
    """Bar-plot mean and max absolute gradient per parameter to diagnose vanishing/exploding grads.

    Args:
        named_parameters: iterable of (name, Parameter) pairs,
            e.g. model.named_parameters(), read after a backward pass.
        save_path: if given, the figure is written there (parent dirs created).
    """
    ave_grads = []
    max_grads = []
    layers = []
    for n, p in named_parameters:
        # Skip frozen params and params that have not received a gradient.
        if p.requires_grad and p.grad is not None:
            layers.append(n)
            ave_grads.append(p.grad.abs().mean().cpu().item())
            max_grads.append(p.grad.abs().max().cpu().item())
    plt.figure(figsize=(12, 6))
    plt.bar(np.arange(len(max_grads)), max_grads, alpha=0.5, lw=1, color="c", label="max gradient")
    plt.bar(np.arange(len(ave_grads)), ave_grads, alpha=0.5, lw=1, color="b", label="mean gradient")
    plt.hlines(0, 0, len(ave_grads) + 1, lw=2, color="k")
    plt.xticks(range(0, len(ave_grads), 1), layers, rotation="vertical")
    plt.xlim(left=0, right=len(ave_grads))
    if max_grads:
        # Guard: max() on an empty list (no gradients yet) raises ValueError.
        plt.ylim(bottom=-0.001, top=max(max_grads) * 1.1)
    plt.xlabel("Layers")
    plt.ylabel("Gradient")
    plt.title("Gradient Flow")
    plt.grid(True, alpha=0.3)
    plt.legend()
    plt.tight_layout()
    if save_path:
        # os.makedirs('') raises FileNotFoundError for bare filenames, so guard.
        out_dir = os.path.dirname(save_path)
        if out_dir:
            os.makedirs(out_dir, exist_ok=True)
        plt.savefig(save_path, dpi=300, bbox_inches='tight')
        print(f"Gradient flow plot saved to {save_path}")
    plt.close()
"repo_id": "geekcomputers/Python",
"file_path": "ML/src/python/neuralforge/utils/visualization.py",
"license": "MIT License",
"lines": 159,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
geekcomputers/Python:ML/tests/gui_test.py | import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
from PyQt6.QtWidgets import (QApplication, QMainWindow, QWidget, QVBoxLayout,
QHBoxLayout, QPushButton, QLabel, QLineEdit,
QFileDialog, QProgressBar, QTextEdit, QGroupBox,
QGridLayout)
from PyQt6.QtCore import Qt, QThread, pyqtSignal
from PyQt6.QtGui import QPixmap, QFont
import torch
import torch.nn.functional as F
from torchvision import transforms
from PIL import Image
from src.python.neuralforge.data.datasets import get_dataset, get_num_classes
from src.python.neuralforge.models.resnet import ResNet18
class PredictionThread(QThread):
    """Background worker that classifies one image and emits the top-5 results.

    Emits `finished(predictions, confidences, top1_label)` on success, or
    `error(message)` on any failure, keeping the GUI thread responsive.
    """

    finished = pyqtSignal(list, list, str)
    error = pyqtSignal(str)

    def __init__(self, model, image_path, classes, device):
        super().__init__()
        self.model = model
        self.image_path = image_path
        self.classes = classes
        self.device = device

    def run(self):
        try:
            # ImageNet-style preprocessing: resize, center-crop, normalize.
            preprocess = transforms.Compose([
                transforms.Resize(256),
                transforms.CenterCrop(224),
                transforms.ToTensor(),
                transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
            ])
            img = Image.open(self.image_path).convert('RGB')
            batch = preprocess(img).unsqueeze(0).to(self.device)
            with torch.no_grad():
                logits = self.model(batch)
                probs = F.softmax(logits, dim=1)
            top_prob, top_idx = torch.topk(probs, min(5, len(self.classes)), dim=1)
            labels = []
            scores = []
            for cls_idx, prob in zip(top_idx[0].cpu().numpy(), top_prob[0].cpu().numpy()):
                labels.append(self.classes[cls_idx])
                scores.append(float(prob) * 100)
            self.finished.emit(labels, scores, labels[0])
        except Exception as exc:
            self.error.emit(str(exc))
class NeuralForgeGUI(QMainWindow):
    """Main window for interactively testing a trained ResNet18 image classifier.

    Left panel: model/dataset selection plus image preview; right panel:
    top-1 prediction, top-5 confidence bars, and model info. Inference runs
    on a PredictionThread so the UI stays responsive.
    """

    def __init__(self):
        super().__init__()
        self.model = None  # populated by load_model()
        self.device = 'cuda' if torch.cuda.is_available() else 'cpu'
        self.classes = []  # class names for the active dataset
        self.dataset_name = 'cifar10'  # normalized dataset key
        self.init_ui()
        self.apply_stylesheet()

    def init_ui(self):
        """Create the window frame and the two side-by-side panels."""
        self.setWindowTitle('NeuralForge - Model Tester')
        self.setGeometry(100, 100, 1200, 800)
        central_widget = QWidget()
        self.setCentralWidget(central_widget)
        main_layout = QHBoxLayout()
        central_widget.setLayout(main_layout)
        left_panel = self.create_left_panel()
        right_panel = self.create_right_panel()
        # Equal stretch factors: the panels share the width 50/50.
        main_layout.addWidget(left_panel, 1)
        main_layout.addWidget(right_panel, 1)

    def create_left_panel(self):
        """Build the model-selection and image-selection panel; returns a QWidget."""
        panel = QWidget()
        layout = QVBoxLayout()
        panel.setLayout(layout)
        title = QLabel('🚀 NeuralForge Model Tester')
        title.setFont(QFont('Arial', 20, QFont.Weight.Bold))
        title.setAlignment(Qt.AlignmentFlag.AlignCenter)
        layout.addWidget(title)
        model_group = QGroupBox('Model Selection')
        model_layout = QVBoxLayout()
        model_path_layout = QHBoxLayout()
        self.model_path_input = QLineEdit()
        self.model_path_input.setPlaceholderText('Path to model file (.pt)')
        model_path_layout.addWidget(self.model_path_input)
        browse_btn = QPushButton('Browse')
        browse_btn.clicked.connect(self.browse_model)
        model_path_layout.addWidget(browse_btn)
        default_btn = QPushButton('Use Default')
        default_btn.clicked.connect(self.use_default_model)
        model_path_layout.addWidget(default_btn)
        model_layout.addLayout(model_path_layout)
        dataset_layout = QHBoxLayout()
        dataset_label = QLabel('Dataset:')
        self.dataset_input = QLineEdit('cifar10')
        self.dataset_input.setPlaceholderText('cifar10, mnist, stl10, tiny_imagenet, etc.')
        self.dataset_input.setToolTip('Supported: cifar10, cifar100, mnist, fashion_mnist, stl10,\ntiny_imagenet, imagenet, food101, caltech256, oxford_pets')
        dataset_layout.addWidget(dataset_label)
        dataset_layout.addWidget(self.dataset_input)
        model_layout.addLayout(dataset_layout)
        self.load_model_btn = QPushButton('Load Model')
        self.load_model_btn.clicked.connect(self.load_model)
        model_layout.addWidget(self.load_model_btn)
        self.model_status = QLabel('No model loaded')
        self.model_status.setAlignment(Qt.AlignmentFlag.AlignCenter)
        model_layout.addWidget(self.model_status)
        model_group.setLayout(model_layout)
        layout.addWidget(model_group)
        image_group = QGroupBox('Image Selection')
        image_layout = QVBoxLayout()
        image_path_layout = QHBoxLayout()
        self.image_path_input = QLineEdit()
        self.image_path_input.setPlaceholderText('Path to image file')
        image_path_layout.addWidget(self.image_path_input)
        browse_image_btn = QPushButton('Browse')
        browse_image_btn.clicked.connect(self.browse_image)
        image_path_layout.addWidget(browse_image_btn)
        image_layout.addLayout(image_path_layout)
        self.image_preview = QLabel()
        self.image_preview.setAlignment(Qt.AlignmentFlag.AlignCenter)
        self.image_preview.setMinimumHeight(300)
        self.image_preview.setStyleSheet('border: 2px dashed #666; border-radius: 10px;')
        self.image_preview.setText('No image selected')
        image_layout.addWidget(self.image_preview)
        self.predict_btn = QPushButton('🔍 Predict')
        self.predict_btn.clicked.connect(self.predict_image)
        # Disabled until a model has been loaded successfully.
        self.predict_btn.setEnabled(False)
        image_layout.addWidget(self.predict_btn)
        image_group.setLayout(image_layout)
        layout.addWidget(image_group)
        layout.addStretch()
        return panel

    def create_right_panel(self):
        """Build the results panel (prediction, top-5 list, model info); returns a QWidget."""
        panel = QWidget()
        layout = QVBoxLayout()
        panel.setLayout(layout)
        results_group = QGroupBox('Prediction Results')
        results_layout = QVBoxLayout()
        self.main_prediction = QLabel('No prediction yet')
        self.main_prediction.setFont(QFont('Arial', 24, QFont.Weight.Bold))
        self.main_prediction.setAlignment(Qt.AlignmentFlag.AlignCenter)
        self.main_prediction.setStyleSheet('color: #4CAF50; padding: 20px;')
        results_layout.addWidget(self.main_prediction)
        self.confidence_label = QLabel('')
        self.confidence_label.setFont(QFont('Arial', 16))
        self.confidence_label.setAlignment(Qt.AlignmentFlag.AlignCenter)
        results_layout.addWidget(self.confidence_label)
        # Indeterminate progress bar shown while the worker thread runs.
        self.progress_bar = QProgressBar()
        self.progress_bar.setVisible(False)
        results_layout.addWidget(self.progress_bar)
        results_group.setLayout(results_layout)
        layout.addWidget(results_group)
        top5_group = QGroupBox('Top-5 Predictions')
        top5_layout = QVBoxLayout()
        self.top5_display = QTextEdit()
        self.top5_display.setReadOnly(True)
        self.top5_display.setMinimumHeight(200)
        top5_layout.addWidget(self.top5_display)
        top5_group.setLayout(top5_layout)
        layout.addWidget(top5_group)
        info_group = QGroupBox('Model Information')
        info_layout = QVBoxLayout()
        self.model_info = QTextEdit()
        self.model_info.setReadOnly(True)
        self.model_info.setMaximumHeight(150)
        info_layout.addWidget(self.model_info)
        info_group.setLayout(info_layout)
        layout.addWidget(info_group)
        layout.addStretch()
        return panel

    def apply_stylesheet(self):
        """Apply the app-wide dark theme with green accents (Qt Style Sheet)."""
        # The QSS string below is runtime data consumed by Qt — edit with care.
        qss = """
        QMainWindow {
            background-color: #1e1e1e;
        }
        QWidget {
            background-color: #1e1e1e;
            color: #e0e0e0;
            font-family: 'Segoe UI', Arial;
            font-size: 12px;
        }
        QGroupBox {
            border: 2px solid #3d3d3d;
            border-radius: 8px;
            margin-top: 10px;
            padding-top: 15px;
            font-weight: bold;
            color: #4CAF50;
        }
        QGroupBox::title {
            subcontrol-origin: margin;
            left: 10px;
            padding: 0 5px;
        }
        QPushButton {
            background-color: #4CAF50;
            color: white;
            border: none;
            padding: 10px 20px;
            border-radius: 5px;
            font-weight: bold;
            font-size: 13px;
        }
        QPushButton:hover {
            background-color: #45a049;
        }
        QPushButton:pressed {
            background-color: #3d8b40;
        }
        QPushButton:disabled {
            background-color: #555555;
            color: #888888;
        }
        QLineEdit {
            background-color: #2d2d2d;
            border: 2px solid #3d3d3d;
            border-radius: 5px;
            padding: 8px;
            color: #e0e0e0;
        }
        QLineEdit:focus {
            border: 2px solid #4CAF50;
        }
        QTextEdit {
            background-color: #2d2d2d;
            border: 2px solid #3d3d3d;
            border-radius: 5px;
            padding: 10px;
            color: #e0e0e0;
        }
        QLabel {
            color: #e0e0e0;
        }
        QProgressBar {
            border: 2px solid #3d3d3d;
            border-radius: 5px;
            text-align: center;
            background-color: #2d2d2d;
        }
        QProgressBar::chunk {
            background-color: #4CAF50;
            border-radius: 3px;
        }
        """
        self.setStyleSheet(qss)

    def browse_model(self):
        """Open a file dialog for the checkpoint path and store the selection."""
        file_path, _ = QFileDialog.getOpenFileName(
            self,
            'Select Model File',
            '../models',
            'Model Files (*.pt *.pth);;All Files (*.*)'
        )
        if file_path:
            self.model_path_input.setText(file_path)

    def use_default_model(self):
        """Point the model path at the repository's bundled models/final_model.pt."""
        default_path = os.path.join(os.path.dirname(__file__), '..', 'models', 'final_model.pt')
        self.model_path_input.setText(os.path.abspath(default_path))

    def browse_image(self):
        """Open a file dialog for an image file and show it in the preview."""
        file_path, _ = QFileDialog.getOpenFileName(
            self,
            'Select Image File',
            '',
            'Image Files (*.png *.jpg *.jpeg *.bmp *.gif);;All Files (*.*)'
        )
        if file_path:
            self.image_path_input.setText(file_path)
            self.display_image(file_path)

    def display_image(self, image_path):
        """Render *image_path* scaled into the preview label."""
        try:
            pixmap = QPixmap(image_path)
            scaled_pixmap = pixmap.scaled(400, 300, Qt.AspectRatioMode.KeepAspectRatio,
                                          Qt.TransformationMode.SmoothTransformation)
            self.image_preview.setPixmap(scaled_pixmap)
        except Exception as e:
            self.image_preview.setText(f'Error loading image: {e}')

    def load_model(self):
        """Build ResNet18 for the chosen dataset, load checkpoint weights, resolve class names."""
        model_path = self.model_path_input.text()
        dataset_input = self.dataset_input.text().lower().strip()
        # Map common hyphen/underscore spellings to canonical dataset keys;
        # unknown input passes through unchanged.
        dataset_aliases = {
            'cifar10': 'cifar10',
            'cifar-10': 'cifar10',
            'cifar_10': 'cifar10',
            'cifar100': 'cifar100',
            'cifar-100': 'cifar100',
            'cifar_100': 'cifar100',
            'mnist': 'mnist',
            'fashionmnist': 'fashion_mnist',
            'fashion-mnist': 'fashion_mnist',
            'fashion_mnist': 'fashion_mnist',
            'stl10': 'stl10',
            'stl-10': 'stl10',
            'stl_10': 'stl10',
            'tinyimagenet': 'tiny_imagenet',
            'tiny-imagenet': 'tiny_imagenet',
            'tiny_imagenet': 'tiny_imagenet',
            'imagenet': 'imagenet',
            'food101': 'food101',
            'food-101': 'food101',
            'food_101': 'food101',
            'caltech256': 'caltech256',
            'caltech-256': 'caltech256',
            'caltech_256': 'caltech256',
            'oxfordpets': 'oxford_pets',
            'oxford-pets': 'oxford_pets',
            'oxford_pets': 'oxford_pets',
        }
        self.dataset_name = dataset_aliases.get(dataset_input, dataset_input)
        if not model_path:
            self.model_status.setText('Please select a model file')
            self.model_status.setStyleSheet('color: #f44336;')
            return
        if not os.path.exists(model_path):
            self.model_status.setText('Model file not found')
            self.model_status.setStyleSheet('color: #f44336;')
            return
        try:
            self.model_status.setText('Loading model...')
            self.model_status.setStyleSheet('color: #FFC107;')
            QApplication.processEvents()  # repaint the status label before the blocking load
            num_classes = get_num_classes(self.dataset_name)
            self.model = ResNet18(num_classes=num_classes)
            self.model = self.model.to(self.device)
            # NOTE(review): weights_only=False unpickles arbitrary objects —
            # only load checkpoints from trusted sources.
            checkpoint = torch.load(model_path, map_location=self.device, weights_only=False)
            # Assumes the checkpoint schema written by the training script
            # ('model_state_dict', optionally 'epoch'/'best_val_loss') — TODO confirm.
            self.model.load_state_dict(checkpoint['model_state_dict'])
            self.model.eval()
            try:
                dataset = get_dataset(self.dataset_name, train=False, download=False)
                self.classes = getattr(dataset, 'classes', [str(i) for i in range(num_classes)])
            except:
                # Dataset not available locally: fall back to the static name table.
                from src.python.neuralforge.data.datasets import get_class_names
                self.classes = get_class_names(self.dataset_name)
            self.model_status.setText(f'✓ Model loaded successfully')
            self.model_status.setStyleSheet('color: #4CAF50;')
            self.predict_btn.setEnabled(True)
            total_params = sum(p.numel() for p in self.model.parameters())
            epoch = checkpoint.get('epoch', 'Unknown')
            val_loss = checkpoint.get('best_val_loss', 'Unknown')
            val_loss_str = f"{val_loss:.4f}" if isinstance(val_loss, float) else str(val_loss)
            info_text = f"""
Model: ResNet18
Dataset: {self.dataset_name.upper()}
Classes: {num_classes}
Parameters: {total_params:,}
Epoch: {epoch}
Best Val Loss: {val_loss_str}
Device: {self.device.upper()}
"""
            self.model_info.setText(info_text.strip())
        except Exception as e:
            self.model_status.setText(f'Error: {str(e)}')
            self.model_status.setStyleSheet('color: #f44336;')

    def predict_image(self):
        """Validate the inputs, then classify the selected image on a worker thread."""
        image_path = self.image_path_input.text()
        if not image_path or not os.path.exists(image_path):
            self.main_prediction.setText('Please select a valid image')
            self.main_prediction.setStyleSheet('color: #f44336;')
            return
        if self.model is None:
            self.main_prediction.setText('Please load a model first')
            self.main_prediction.setStyleSheet('color: #f44336;')
            return
        self.predict_btn.setEnabled(False)
        self.progress_bar.setVisible(True)
        self.progress_bar.setRange(0, 0)  # range (0, 0) = indeterminate/busy indicator
        # Keep a reference on self so the QThread is not garbage-collected mid-run.
        self.prediction_thread = PredictionThread(self.model, image_path, self.classes, self.device)
        self.prediction_thread.finished.connect(self.display_results)
        self.prediction_thread.error.connect(self.display_error)
        self.prediction_thread.start()

    def display_results(self, predictions, confidences, main_prediction):
        """Slot: show the top-1 label plus an HTML list of top-5 confidence bars."""
        self.progress_bar.setVisible(False)
        self.predict_btn.setEnabled(True)
        self.main_prediction.setText(f'🎯 {main_prediction}')
        self.main_prediction.setStyleSheet('color: #4CAF50; padding: 20px; font-size: 28px;')
        self.confidence_label.setText(f'Confidence: {confidences[0]:.2f}%')
        top5_text = '<h3>Top-5 Predictions:</h3><hr>'
        for i, (pred, conf) in enumerate(zip(predictions, confidences), 1):
            bar_width = int(conf * 3)  # scale percent confidence to bar characters
            bar = '█' * bar_width
            top5_text += f'<p style="margin: 10px 0;"><b>{i}. {pred}</b><br>'
            top5_text += f'<span style="color: #4CAF50;">{bar}</span> {conf:.2f}%</p>'
        self.top5_display.setHtml(top5_text)

    def display_error(self, error_msg):
        """Slot: surface a worker-thread failure in the main prediction label."""
        self.progress_bar.setVisible(False)
        self.predict_btn.setEnabled(True)
        self.main_prediction.setText(f'Error: {error_msg}')
        self.main_prediction.setStyleSheet('color: #f44336;')
def main():
    """Create the Qt application, show the main window, and enter the event loop."""
    app = QApplication(sys.argv)
    gui = NeuralForgeGUI()
    gui.show()
    sys.exit(app.exec())


if __name__ == '__main__':
    main()
"repo_id": "geekcomputers/Python",
"file_path": "ML/tests/gui_test.py",
"license": "MIT License",
"lines": 389,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
geekcomputers/Python:ML/tests/quick_test.py | import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
import torch
from src.python.neuralforge.data.datasets import get_dataset
from src.python.neuralforge.models.resnet import ResNet18
# Smoke test: verify dataset download, model construction, and one forward pass.
print("=" * 60)
print(" NeuralForge Quick Test")
print("=" * 60)
# Step 1: downloads the CIFAR-10 test split on first run (network + disk I/O).
print("\n[1/3] Testing CIFAR-10 dataset download...")
try:
    dataset = get_dataset('cifar10', root='./data', train=False, download=True)
    print(f"✓ CIFAR-10 loaded: {len(dataset)} samples")
    print(f" Classes: {dataset.classes}")
except Exception as e:
    print(f"✗ Failed: {e}")
# Step 2: build an untrained ResNet18 with a 10-way classification head.
print("\n[2/3] Testing model creation...")
try:
    model = ResNet18(num_classes=10)
    print(f"✓ Model created: {sum(p.numel() for p in model.parameters()):,} parameters")
except Exception as e:
    print(f"✗ Failed: {e}")
# Step 3: single-sample forward pass.
# NOTE(review): if step 1 or 2 failed, `dataset`/`model` are undefined here and
# this block reports that NameError via its except clause instead of crashing.
print("\n[3/3] Testing inference...")
try:
    model.eval()
    image, label = dataset[0]
    with torch.no_grad():
        output = model(image.unsqueeze(0))
    print(f"✓ Inference successful: output shape {output.shape}")
    print(f" True label: {dataset.classes[label]}")
    pred = output.argmax(1).item()
    print(f" Predicted: {dataset.classes[pred]}")
except Exception as e:
    print(f"✗ Failed: {e}")
# Summary banner is printed unconditionally, even if a step failed above.
print("\n" + "=" * 60)
print(" All tests passed! Ready to train.")
print("=" * 60)
print("\nTry these commands:")
print(" python train.py --dataset cifar10 --epochs 20")
print(" python tests/test_model.py --dataset cifar10 --mode interactive")
print("=" * 60)
| {
"repo_id": "geekcomputers/Python",
"file_path": "ML/tests/quick_test.py",
"license": "MIT License",
"lines": 41,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
geekcomputers/Python:ML/tests/test_model.py | import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
import torch
import torch.nn.functional as F
from torchvision import transforms
from PIL import Image
import numpy as np
from src.python.neuralforge.data.datasets import get_dataset, get_num_classes, get_class_names
from src.python.neuralforge.models.resnet import ResNet18
class ModelTester:
    """Interactive CLI harness for evaluating a trained ResNet18 checkpoint on a test set.

    Supports random-sample spot checks, single-sample inspection, full-split
    per-class accuracy, and classifying external image files.
    """

    def __init__(self, model_path='./models/best_model.pt', dataset='cifar10', device='cuda'):
        # Fall back to CPU when CUDA is unavailable, regardless of the request.
        self.device = device if torch.cuda.is_available() else 'cpu'
        self.dataset_name = dataset
        print("=" * 60)
        print(" NeuralForge - Interactive Model Testing")
        print("=" * 60)
        print(f"Device: {self.device}")
        num_classes = get_num_classes(dataset)
        self.model = self.create_model(num_classes)
        if os.path.exists(model_path):
            print(f"Loading model from: {model_path}")
            # NOTE(review): weights_only=False unpickles arbitrary objects —
            # only load checkpoints from trusted sources.
            checkpoint = torch.load(model_path, map_location=self.device, weights_only=False)
            self.model.load_state_dict(checkpoint['model_state_dict'])
            print(f"Model loaded from epoch {checkpoint['epoch']}")
        else:
            print(f"Warning: No model found at {model_path}, using untrained model")
        self.model.eval()
        test_dataset = get_dataset(dataset, root='./data', train=False, download=True)
        # NOTE(review): assumes get_dataset returns a wrapper exposing the raw
        # dataset via `.dataset` — confirm against datasets.py.
        self.dataset = test_dataset.dataset
        self.classes = get_class_names(dataset)
        # Native image resolution per dataset; custom images are resized to it.
        if dataset in ['mnist', 'fashion_mnist']:
            self.image_size = 28
        elif dataset in ['cifar10', 'cifar100']:
            self.image_size = 32
        elif dataset == 'stl10':
            self.image_size = 96
        else:
            self.image_size = 224
        print(f"Dataset: {dataset} ({len(self.dataset)} test samples)")
        print(f"Classes: {len(self.classes)}")
        print("=" * 60)

    def create_model(self, num_classes):
        """Build a ResNet18 with *num_classes* outputs and move it to the active device."""
        model = ResNet18(num_classes=num_classes)
        return model.to(self.device)

    def predict_image(self, image_tensor):
        """Classify a single (C, H, W) tensor.

        Returns:
            (predicted_class, confidence, top5_indices, top5_probabilities)
        """
        with torch.no_grad():
            image_tensor = image_tensor.unsqueeze(0).to(self.device)
            outputs = self.model(image_tensor)
            probabilities = F.softmax(outputs, dim=1)
            confidence, predicted = torch.max(probabilities, 1)
            top5_prob, top5_idx = torch.topk(probabilities, min(5, len(self.classes)), dim=1)
            return predicted.item(), confidence.item(), top5_idx[0].cpu().numpy(), top5_prob[0].cpu().numpy()

    def test_random_samples(self, num_samples=10):
        """Evaluate *num_samples* distinct random test images and print per-sample results."""
        print(f"\nTesting {num_samples} random samples...")
        print("-" * 60)
        correct = 0
        indices = np.random.choice(len(self.dataset), num_samples, replace=False)
        for i, idx in enumerate(indices, 1):
            image, label = self.dataset[idx]
            pred_class, confidence, top5_idx, top5_prob = self.predict_image(image)
            true_label = self.classes[label]
            pred_label = self.classes[pred_class]
            is_correct = pred_class == label
            correct += is_correct
            status = "✓" if is_correct else "✗"
            print(f"{i:2d}. {status} True: {true_label:15s} | Pred: {pred_label:15s} | Conf: {confidence:.2%}")
            if not is_correct:
                # Show the full top-5 only for mistakes, to aid debugging.
                print(f" Top-5: ", end="")
                for j, (idx, prob) in enumerate(zip(top5_idx, top5_prob)):
                    print(f"{self.classes[idx]}({prob:.1%})", end=" ")
                print()
        accuracy = correct / num_samples
        print("-" * 60)
        print(f"Accuracy: {accuracy:.1%} ({correct}/{num_samples})")

    def test_specific_sample(self, index):
        """Print full prediction details for test sample *index* (bounds-checked)."""
        if index < 0 or index >= len(self.dataset):
            print(f"Error: Index must be between 0 and {len(self.dataset)-1}")
            return
        image, label = self.dataset[index]
        pred_class, confidence, top5_idx, top5_prob = self.predict_image(image)
        print(f"\nSample #{index}")
        print("-" * 60)
        print(f"True Label: {self.classes[label]}")
        print(f"Predicted: {self.classes[pred_class]}")
        print(f"Confidence: {confidence:.2%}")
        print(f"Status: {'✓ Correct' if pred_class == label else '✗ Wrong'}")
        print("\nTop-5 Predictions:")
        for i, (idx, prob) in enumerate(zip(top5_idx, top5_prob), 1):
            print(f" {i}. {self.classes[idx]:15s} {prob:.2%}")

    def test_class_accuracy(self):
        """Run the entire test split and print per-class and overall accuracy."""
        print("\nCalculating per-class accuracy...")
        print("-" * 60)
        class_correct = [0] * len(self.classes)
        class_total = [0] * len(self.classes)
        with torch.no_grad():
            for i, (image, label) in enumerate(self.dataset):
                pred_class, _, _, _ = self.predict_image(image)
                class_total[label] += 1
                if pred_class == label:
                    class_correct[label] += 1
                # Lightweight progress indicator, overwritten in place via '\r'.
                if (i + 1) % 100 == 0:
                    print(f"Processed {i + 1}/{len(self.dataset)} samples...", end='\r')
        print(" " * 60, end='\r')
        print("Per-class Accuracy:")
        overall_correct = sum(class_correct)
        overall_total = sum(class_total)
        for i, class_name in enumerate(self.classes):
            if class_total[i] > 0:
                acc = 100.0 * class_correct[i] / class_total[i]
                print(f" {class_name:15s}: {acc:5.1f}% ({class_correct[i]}/{class_total[i]})")
        print("-" * 60)
        print(f"Overall Accuracy: {100.0 * overall_correct / overall_total:.2f}% ({overall_correct}/{overall_total})")

    def test_custom_image(self, image_path):
        """Classify an arbitrary image file from disk, resized to the dataset's resolution."""
        if not os.path.exists(image_path):
            print(f"Error: Image not found at {image_path}")
            return
        try:
            image = Image.open(image_path).convert('RGB')
            # NOTE(review): no normalization is applied here — confirm whether
            # this matches the transform pipeline used at training time.
            transform = transforms.Compose([
                transforms.Resize((self.image_size, self.image_size)),
                transforms.ToTensor(),
            ])
            image_tensor = transform(image)
            pred_class, confidence, top5_idx, top5_prob = self.predict_image(image_tensor)
            print(f"\nCustom Image: {image_path}")
            print("-" * 60)
            print(f"Predicted: {self.classes[pred_class]}")
            print(f"Confidence: {confidence:.2%}")
            print("\nTop-5 Predictions:")
            for i, (idx, prob) in enumerate(zip(top5_idx, top5_prob), 1):
                print(f" {i}. {self.classes[idx]:15s} {prob:.2%}")
        except Exception as e:
            print(f"Error loading image: {e}")

    def interactive_mode(self):
        """Read-eval loop dispatching the commands listed in the banner."""
        print("\n" + "=" * 60)
        print(" Interactive Mode")
        print("=" * 60)
        print("\nCommands:")
        print(" random [N] - Test N random samples (default: 10)")
        print(" sample <index> - Test specific sample by index")
        print(" image <path> - Test custom image file")
        print(" accuracy - Calculate full test set accuracy")
        print(" help - Show this help")
        print(" exit - Exit interactive mode")
        print()
        while True:
            try:
                # NOTE: the whole command line is lower-cased, including any
                # file path given to the 'image' command.
                command = input(">>> ").strip().lower()
                if not command:
                    continue
                if command == 'exit' or command == 'quit':
                    print("Exiting...")
                    break
                elif command == 'help':
                    # Re-enters interactive_mode to reprint the banner and run a
                    # nested loop; the outer loop returns when the nested one exits.
                    self.interactive_mode()
                    return
                elif command.startswith('random'):
                    parts = command.split()
                    n = int(parts[1]) if len(parts) > 1 else 10
                    self.test_random_samples(n)
                elif command.startswith('sample'):
                    parts = command.split()
                    if len(parts) < 2:
                        print("Usage: sample <index>")
                    else:
                        idx = int(parts[1])
                        self.test_specific_sample(idx)
                elif command.startswith('image'):
                    # maxsplit=1 keeps paths containing spaces intact.
                    parts = command.split(maxsplit=1)
                    if len(parts) < 2:
                        print("Usage: image <path>")
                    else:
                        self.test_custom_image(parts[1])
                elif command == 'accuracy':
                    self.test_class_accuracy()
                else:
                    print(f"Unknown command: {command}")
                    print("Type 'help' for available commands")
            except KeyboardInterrupt:
                print("\nExiting...")
                break
            except Exception as e:
                # Keep the REPL alive on bad input (e.g. non-integer index).
                print(f"Error: {e}")
def main():
    """Parse CLI arguments and dispatch to the requested ModelTester mode."""
    import argparse
    dataset_choices = ['cifar10', 'cifar100', 'mnist', 'fashion_mnist', 'stl10',
                       'tiny_imagenet', 'imagenet', 'food101', 'caltech256', 'oxford_pets']
    default_model = os.path.join(os.path.dirname(__file__), '..', 'models', 'best_model.pt')
    parser = argparse.ArgumentParser(description='Test trained NeuralForge model')
    parser.add_argument('--model', type=str, default=default_model, help='Path to model checkpoint')
    parser.add_argument('--dataset', type=str, default='cifar10',
                        choices=dataset_choices,
                        help='Dataset to test on')
    parser.add_argument('--device', type=str, default='cuda', help='Device to use')
    parser.add_argument('--mode', type=str, default='interactive',
                        choices=['interactive', 'random', 'accuracy'],
                        help='Testing mode')
    parser.add_argument('--samples', type=int, default=10, help='Number of samples for random mode')
    args = parser.parse_args()
    tester = ModelTester(model_path=args.model, dataset=args.dataset, device=args.device)
    # Dispatch table instead of an if/elif chain; choices above guarantee the key.
    actions = {
        'interactive': tester.interactive_mode,
        'random': lambda: tester.test_random_samples(args.samples),
        'accuracy': tester.test_class_accuracy,
    }
    actions[args.mode]()


if __name__ == '__main__':
    main()
| {
"repo_id": "geekcomputers/Python",
"file_path": "ML/tests/test_model.py",
"license": "MIT License",
"lines": 208,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
geekcomputers/Python:ML/train.py | import torch
import torch.nn as nn
import torch.optim as optim
import argparse
import os
import random
import numpy as np
from src.python.neuralforge import nn as nf_nn
from src.python.neuralforge import optim as nf_optim
from src.python.neuralforge.trainer import Trainer
from src.python.neuralforge.config import Config
from src.python.neuralforge.data.dataset import SyntheticDataset, DataLoaderBuilder
from src.python.neuralforge.data.datasets import get_dataset, get_num_classes
from src.python.neuralforge.data.transforms import get_transforms
from src.python.neuralforge.models.resnet import ResNet18
from src.python.neuralforge.utils.logger import Logger
def set_seed(seed):
    """Seed every RNG in use (python, numpy, torch CPU and CUDA) for reproducibility."""
    for seeder in (random.seed, np.random.seed, torch.manual_seed, torch.cuda.manual_seed_all):
        seeder(seed)
    # Force deterministic cuDNN kernels; disables autotuning for reproducibility.
    torch.backends.cudnn.deterministic = True
    torch.backends.cudnn.benchmark = False
def create_simple_model(num_classes=10):
    """Small CNN baseline: three Conv-BN-ReLU stages (first two followed by
    2x2 max-pooling), global average pooling, and a linear classifier head."""
    stages = ((3, 32, True), (32, 64, True), (64, 128, False))
    layers = []
    for cin, cout, pooled in stages:
        layers.append(nn.Conv2d(cin, cout, 3, padding=1))
        layers.append(nn.BatchNorm2d(cout))
        layers.append(nn.ReLU(inplace=True))
        if pooled:
            layers.append(nn.MaxPool2d(2))
    layers.extend([nn.AdaptiveAvgPool2d(1), nn.Flatten(), nn.Linear(128, num_classes)])
    return nn.Sequential(*layers)
def main():
    """Train a model end-to-end from CLI flags.

    Builds the config, dataset, model, optimizer and scheduler, then hands
    everything to Trainer. Side effects: seeds all RNGs, writes logs under
    config.log_dir, and saves the final config there as config.json.
    """
    # ---------- CLI ----------
    parser = argparse.ArgumentParser(description='NeuralForge Training')
    parser.add_argument('--config', type=str, default=None, help='Path to config file')
    parser.add_argument('--model', type=str, default='simple', choices=['simple', 'resnet18', 'efficientnet', 'vit'])
    parser.add_argument('--batch-size', type=int, default=32)
    parser.add_argument('--epochs', type=int, default=50)
    parser.add_argument('--lr', type=float, default=0.001)
    parser.add_argument('--device', type=str, default='cuda' if torch.cuda.is_available() else 'cpu')
    parser.add_argument('--num-samples', type=int, default=5000, help='Number of synthetic samples')
    parser.add_argument('--num-classes', type=int, default=10)
    parser.add_argument('--seed', type=int, default=42)
    parser.add_argument('--dataset', type=str, default='synthetic',
                        choices=['synthetic', 'cifar10', 'cifar100', 'mnist', 'fashion_mnist', 'stl10',
                                 'tiny_imagenet', 'imagenet', 'food101', 'caltech256', 'oxford_pets'],
                        help='Dataset to use')
    args = parser.parse_args()
    # ---------- Config ----------
    # The per-flag overrides below apply only when no --config file is given;
    # a loaded config file wins over the other CLI flags.
    if args.config:
        config = Config.load(args.config)
    else:
        config = Config()
        config.batch_size = args.batch_size
        config.epochs = args.epochs
        config.learning_rate = args.lr
        config.device = args.device
        config.num_classes = args.num_classes
        config.seed = args.seed
    set_seed(config.seed)
    logger = Logger(config.log_dir, "training")
    logger.info("=" * 80)
    logger.info("NeuralForge Training Framework")
    logger.info("=" * 80)
    logger.info(f"Configuration:\n{config}")
    # ---------- Data ----------
    if args.dataset == 'synthetic':
        logger.info("Creating synthetic dataset...")
        train_dataset = SyntheticDataset(
            num_samples=args.num_samples,
            num_classes=config.num_classes,
            image_size=config.image_size,
            channels=3
        )
        # Validation set is one fifth the size of the training set.
        val_dataset = SyntheticDataset(
            num_samples=args.num_samples // 5,
            num_classes=config.num_classes,
            image_size=config.image_size,
            channels=3
        )
    else:
        logger.info(f"Downloading and loading {args.dataset} dataset...")
        # Real datasets dictate their own class count and input resolution.
        config.num_classes = get_num_classes(args.dataset)
        train_dataset = get_dataset(args.dataset, root=config.data_path, train=True, download=True)
        val_dataset = get_dataset(args.dataset, root=config.data_path, train=False, download=True)
        # NOTE(review): image_size is updated AFTER the datasets are built, and
        # get_transforms is imported but never applied here — confirm the
        # datasets handle resizing/transforms internally.
        if args.dataset in ['mnist', 'fashion_mnist']:
            config.image_size = 28
        elif args.dataset in ['cifar10', 'cifar100']:
            config.image_size = 32
        elif args.dataset == 'tiny_imagenet':
            config.image_size = 64
        elif args.dataset == 'stl10':
            config.image_size = 96
        elif args.dataset in ['imagenet', 'food101', 'caltech256', 'oxford_pets']:
            config.image_size = 224
    loader_builder = DataLoaderBuilder(config)
    train_loader = loader_builder.build_train_loader(train_dataset)
    val_loader = loader_builder.build_val_loader(val_dataset)
    logger.info(f"Train dataset size: {len(train_dataset)}")
    logger.info(f"Validation dataset size: {len(val_dataset)}")
    # ---------- Model ----------
    logger.info(f"Creating model: {args.model}")
    # 'efficientnet' and 'vit' are accepted by the CLI but fall through to the
    # simple CNN here.
    if args.model == 'simple':
        model = create_simple_model(config.num_classes)
    elif args.model == 'resnet18':
        model = ResNet18(num_classes=config.num_classes)
    else:
        model = create_simple_model(config.num_classes)
    logger.log_model_summary(model)
    # ---------- Loss / optimizer / scheduler ----------
    criterion = nn.CrossEntropyLoss()
    # AdamW uses the project's own implementation (nf_optim); Adam and SGD use torch.optim.
    if config.optimizer.lower() == 'adamw':
        optimizer = nf_optim.AdamW(
            model.parameters(),
            lr=config.learning_rate,
            weight_decay=config.weight_decay
        )
    elif config.optimizer.lower() == 'adam':
        optimizer = optim.Adam(
            model.parameters(),
            lr=config.learning_rate,
            weight_decay=config.weight_decay
        )
    else:
        optimizer = optim.SGD(
            model.parameters(),
            lr=config.learning_rate,
            momentum=0.9,
            weight_decay=config.weight_decay
        )
    if config.scheduler == 'cosine':
        scheduler = nf_optim.CosineAnnealingWarmRestarts(
            optimizer,
            T_0=10,
            T_mult=2,
            eta_min=1e-6
        )
    elif config.scheduler == 'onecycle':
        scheduler = nf_optim.OneCycleLR(
            optimizer,
            max_lr=config.learning_rate,
            total_steps=config.epochs * len(train_loader)
        )
    else:
        # Any other value disables LR scheduling.
        scheduler = None
    logger.info(f"Optimizer: {config.optimizer}")
    logger.info(f"Scheduler: {config.scheduler}")
    # ---------- Training ----------
    trainer = Trainer(
        model=model,
        train_loader=train_loader,
        val_loader=val_loader,
        optimizer=optimizer,
        criterion=criterion,
        config=config,
        scheduler=scheduler,
        device=config.device
    )
    logger.info("Starting training...")
    trainer.train()
    logger.info("Training completed successfully!")
    logger.info(f"Best validation loss: {trainer.best_val_loss:.4f}")
    config.save(os.path.join(config.log_dir, 'config.json'))
    logger.info(f"Configuration saved to {os.path.join(config.log_dir, 'config.json')}")
if __name__ == '__main__':
    main()
| {
"repo_id": "geekcomputers/Python",
"file_path": "ML/train.py",
"license": "MIT License",
"lines": 169,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
geekcomputers/Python:remoteok_jobs_scraper/remoteok_jobs.py | import requests
import xlwt
from xlwt import Workbook
BASE_URL = 'https://remoteok.com/api'
USER_AGENT = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/138.0.0.0 Safari/537.36'
REQUEST_HEADER = {
'User-Agent': USER_AGENT,
'Accept-Language': 'en-US, en;q=0.5',
}
def get_job_postings():
    """Fetch job postings from RemoteOK API.

    Returns the list of postings with the first element skipped (presumably
    API metadata — confirm against the RemoteOK API), or an empty list on
    any request failure.
    """
    try:
        # timeout added: without it a stalled connection hangs the script forever
        res = requests.get(BASE_URL, headers=REQUEST_HEADER, timeout=10)
        res.raise_for_status()
        data = res.json()
        return data[1:]
    except requests.RequestException as e:
        print("Error fetching jobs:", e)
        return []
def save_jobs_to_excel(jobs, filename='remoteok_jobs.xls'):
    """Save job postings to an Excel file.

    Writes one header row taken from the first posting's keys, then one row
    per job; missing keys become empty strings. Does nothing if jobs is empty.
    """
    if not jobs:
        print("No job data to save.")
        return
    wb = Workbook()
    sheet = wb.add_sheet('Jobs')
    headers = list(jobs[0].keys())
    for col, header in enumerate(headers):
        sheet.write(0, col, header)
    for row, job in enumerate(jobs, start=1):
        for col, key in enumerate(headers):
            sheet.write(row, col, str(job.get(key, '')))
    wb.save(filename)
    # Bug fix: the message previously printed the literal "(unknown)" instead
    # of the actual output path.
    print(f"Jobs saved to {filename}")
if __name__ == '__main__':
jobs = get_job_postings()
save_jobs_to_excel(jobs)
| {
"repo_id": "geekcomputers/Python",
"file_path": "remoteok_jobs_scraper/remoteok_jobs.py",
"license": "MIT License",
"lines": 37,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:1 File handle/File handle binary/delete.py | import logging
import os
import pickle
from dotenv import load_dotenv
base = os.path.dirname(__file__)
load_dotenv(os.path.join(base, ".env"))
logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s")
student_record = os.getenv("STUDENTS_RECORD_FILE")
def b_read():
    """Load the pickled student file and log every record; warn if the file is missing."""
    if not os.path.exists(student_record):
        logging.warning("File not found")
        return
    with open(student_record, "rb") as handle:
        records = pickle.load(handle)
        logging.info("File opened successfully")
        logging.info("Records in the file are:")
        for record in records:
            logging.info(record)
def b_modify():
    """Delete the record whose roll number the user enters, rewriting the file."""
    if not os.path.exists(student_record):
        logging.warning("File not found")
        return
    roll_no = int(input("Enter the Roll No. to be deleted: "))
    with open(student_record, "rb") as handle:
        records = pickle.load(handle)
    # Keep every record except the one with the matching roll number.
    kept = [record for record in records if record[0] != roll_no]
    with open(student_record, "wb") as handle:
        pickle.dump(kept, handle)
b_read()
b_modify()
| {
"repo_id": "geekcomputers/Python",
"file_path": "1 File handle/File handle binary/delete.py",
"license": "MIT License",
"lines": 33,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:1 File handle/File handle binary/update2.py | # Updating records in a binary file
# ! Have a .env file please
import pickle
import os
from dotenv import load_dotenv
base = os.path.dirname(__file__)
load_dotenv(os.path.join(base, ".env"))
student_record = os.getenv("STUDENTS_RECORD_FILE")
def update():
    """Rename the student whose roll number the user enters, then rewrite the file."""
    with open(student_record, "rb") as F:
        S = pickle.load(F)
        found = False
        rno = int(input("enter the roll number you want to update"))
        for i in S:
            if rno == i[0]:
                print(f"the current name is {i[1]}")
                i[1] = input("enter the new name")
                found = True
                break
        # Bug fix: the original condition was inverted — it printed
        # "Record not found" exactly when the record WAS found.
        if not found:
            print("Record not found")
    # Rewrite the whole file with the (possibly updated) record list.
    with open(student_record, "wb") as F:
        pickle.dump(S, F)
update()
| {
"repo_id": "geekcomputers/Python",
"file_path": "1 File handle/File handle binary/update2.py",
"license": "MIT License",
"lines": 24,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:file_handle/File handle binary/Update a binary file2.py | # updating records in a binary file
import pickle
import os
base = os.path.dirname(__file__)
from dotenv import load_dotenv
load_dotenv(os.path.join(base, ".env"))
student_record = os.getenv("STUDENTS_RECORD_FILE")
## ! Understand how pandas works internally
def update():
    """Interactively edit the name and marks of one student record, then rewrite the file."""
    with open(student_record, "rb") as handle:
        records = pickle.load(handle)
        found = False
        target = int(input("Enter the roll number of the record"))
        for record in records:
            if record[0] == target:
                print(f"current name {record[1]}")
                print(f"current marks {record[2]}")
                record[1] = input("Enter the new name")
                record[2] = int(input("Enter the new marks"))
                found = True
        if not found:
            print("Record not found")
    # Write the full (possibly modified) list back out.
    with open(student_record, "wb") as handle:
        pickle.dump(records, handle)
update()
# ! Instead of AB use WB?
# ! It may have memory limits while updating large files but it would be good
# ! Few lakhs records would be fine and wouldn't create any much of a significant issues
| {
"repo_id": "geekcomputers/Python",
"file_path": "file_handle/File handle binary/Update a binary file2.py",
"license": "MIT License",
"lines": 28,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:file_handle/File handle binary/question 1 (elegible for remedial, top marks).py | """Amit is a monitor of class XII-A and he stored the record of all
the students of his class in a file named “student_records.pkl”.
Structure of record is [roll number, name, percentage]. His computer
teacher has assigned the following duty to Amit
Write a function remcount( ) to count the number of students who need
remedial class (student who scored less than 40 percent)
and find the top students of the class.
We have to find weak students and bright students.
"""
## Find bright students and weak students
from dotenv import load_dotenv
import os
base = os.path.dirname(__file__)
load_dotenv(os.path.join(base, ".env"))
student_record = os.getenv("STUDENTS_RECORD_FILE")
import pickle
import logging
# Define logger with info
# import polar
## ! Unoptimised rehne de abhi ke liye
def remcount():
    """List students scoring at most 40 percent and report how many there are."""
    with open(student_record, "rb") as handle:
        records = pickle.load(handle)
        weak_students = []
        for record in records:
            if record[2] <= 40:
                print(f"{record} eligible for remedial")
                weak_students.append(record)
        print(f"the total number of weak students are {len(weak_students)}")
        print(f"The weak students are {weak_students}")
# ! highest marks is the key here first marks
def firstmark():
    """Print the highest percentage in the file and congratulate every student who got it."""
    with open(student_record, "rb") as handle:
        records = pickle.load(handle)
        top = max(record[2] for record in records)
        print(top, "is the first mark")
        toppers = 0
        for record in records:
            if record[2] == top:
                print(f"{record}\ncongrats")
                toppers += 1
        print("The total number of students who secured top marks are", toppers)
remcount()
firstmark()
| {
"repo_id": "geekcomputers/Python",
"file_path": "file_handle/File handle binary/question 1 (elegible for remedial, top marks).py",
"license": "MIT License",
"lines": 47,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:file_handle/File handle binary/read.py | import pickle
def binary_read():
    """Print the whole record list from studrec.dat, then each record and its fields."""
    with open("studrec.dat", "rb") as handle:
        records = pickle.load(handle)
        # Whole file first (nested list), then record by record.
        print(records)
        print("contents of binary file")
        for record in records:
            print(record)
            rno, rname, rmark = record[0], record[1], record[2]
            print(rno, rname, rmark, end="\t")
binary_read()
| {
"repo_id": "geekcomputers/Python",
"file_path": "file_handle/File handle binary/read.py",
"license": "MIT License",
"lines": 14,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:file_handle/File handle binary/search record in binary file.py | # binary file to search a given record
import pickle
from dotenv import load_dotenv
def search():
    """Look up a user-supplied roll number in student_records.pkl and report any matches."""
    with open("student_records.pkl", "rb") as handle:
        # your file path will be different
        missing = True
        wanted = int(input("Enter the roll number of the student"))
        for record in pickle.load(handle):
            if record[0] == wanted:
                print(f"Record found successfully\n{record}")
                missing = False
        if missing:
            print("Sorry! record not found")
search()
| {
"repo_id": "geekcomputers/Python",
"file_path": "file_handle/File handle binary/search record in binary file.py",
"license": "MIT License",
"lines": 15,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:file_handle/File handle text/counter.py | """
Class resposible for counting words for different files:
- Reduce redundant code
- Easier code management/debugging
- Code readability
"""
## ! Is there any other way than doing it linear?
## ! What will be test cases of it?
# ! Please do let me know.
## ! Can add is digit, isspace methods too later on.
# ! Based on requirements of it
## ! The questions are nothing but test-cases
## ! Make a test thing and handle it.
# does it count only alphabets or numerics too?
# ? what about other characters?
class Counter:
    """Count lower-case and upper-case characters in a piece of text.

    Shared by the file-handle exercises so the counting logic lives in one place.
    Counts are computed once at construction time.
    """

    def __init__(self, text: str) -> None:
        self.text = text
        # Running totals, filled in by compute().
        self.count_lower = 0
        self.count_upper = 0
        self.compute()

    def compute(self) -> None:
        """Tally each character of the text into the lower/upper counters."""
        for char in self.text:
            if char.islower():
                self.count_lower += 1
            elif char.isupper():
                self.count_upper += 1

    def get_total_lower(self) -> int:
        return self.count_lower

    def get_total_upper(self) -> int:
        return self.count_upper

    def get_total_chars(self) -> int:
        return self.count_lower + self.count_upper

    # Backward-compatible alias: callers elsewhere in this repo
    # ("question 5.py", "question 6.py") call get_total(), which did not
    # exist and raised AttributeError at runtime.
    def get_total(self) -> int:
        return self.get_total_chars()
| {
"repo_id": "geekcomputers/Python",
"file_path": "file_handle/File handle text/counter.py",
"license": "MIT License",
"lines": 34,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:file_handle/File handle text/file handle 12 length of line in text file.py | import os
import time
file_name = input("Enter the file name to create:- ")
print(file_name)
def write_to_file(file_name):
    """Append user-entered lines of text to file_name; refuse to touch an existing file."""
    if os.path.exists(file_name):
        print(f"Error: {file_name} already exists.")
        return
    with open(file_name, "a") as handle:
        more = True
        while more:
            handle.write(f"{input('enter any text to add in the file:- ')}\n")
            more = input("Do you want to enter more, y/n").lower() != "n"
def longlines():
    """Print every line of file_name that is shorter than 50 characters."""
    with open(file_name, encoding="utf-8") as handle:
        short_lines = [line for line in handle.readlines() if len(line) < 50]
    if not short_lines:
        print("There is no line which is less than 50")
    else:
        for line in short_lines:
            print(line, end="\t")
if __name__ == "__main__":
write_to_file(file_name)
time.sleep(1)
longlines()
| {
"repo_id": "geekcomputers/Python",
"file_path": "file_handle/File handle text/file handle 12 length of line in text file.py",
"license": "MIT License",
"lines": 28,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:file_handle/File handle text/input,output and error streams.py | # practicing with streams
import sys
sys.stdout.write("Enter the name of the file")
file = sys.stdin.readline()
with open(
file.strip(),
) as F:
while True:
ch = F.readlines()
for i in ch: # ch is the whole file,for i in ch gives lines, for j in i gives letters,for j in i.split gives words
print(i, end="")
else:
sys.stderr.write("End of file reached")
break
| {
"repo_id": "geekcomputers/Python",
"file_path": "file_handle/File handle text/input,output and error streams.py",
"license": "MIT License",
"lines": 14,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:file_handle/File handle text/question 2.py | """Write a method/function DISPLAYWORDS() in python to read lines
from a text file STORY.TXT,
using read function
and display those words, which are less than 4 characters."""
print("Hey!! You can print the word which are less then 4 characters")
def display_words(file_path):
    """Print every word of the file shorter than 4 characters and return a summary tuple.

    Returns (message, count) on success, or None if the file does not exist.
    """
    try:
        with open(file_path) as handle:
            short_words = [word for word in handle.read().split() if len(word) < 4]
        for word in short_words:
            print(word)
        return (
            "The total number of the word's count which has less than 4 characters",
            (len(short_words)),
        )
    except FileNotFoundError:
        print("File not found")
print("Just need to pass the path of your file..")
file_path = input("Please, Enter file path: ")
if __name__ == "__main__":
print(display_words(file_path))
| {
"repo_id": "geekcomputers/Python",
"file_path": "file_handle/File handle text/question 2.py",
"license": "MIT License",
"lines": 22,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:file_handle/File handle text/question 5.py | """Write a function in python to count the number of lowercase
alphabets present in a text file “happy.txt"""
import time
import os
from counter import Counter
print(
"You will see the count of lowercase, uppercase and total count of alphabets in provided file.."
)
file_path = input("Please, Enter file path: ")
if os.path.exists(file_path):
print("The file exists and this is the path:\n", file_path)
def lowercase(file_path):
    """Print the lower-case, upper-case, and total letter counts of the file at file_path."""
    try:
        with open(file_path) as F:
            word_counter = Counter(F.read())
            print(
                f"The total number of lower case letters are {word_counter.get_total_lower()}"
            )
            time.sleep(0.5)
            print(
                f"The total number of upper case letters are {word_counter.get_total_upper()}"
            )
            time.sleep(0.5)
            # Bug fix: Counter defines get_total_chars(), not get_total();
            # the original call raised AttributeError.
            print(f"The total number of letters are {word_counter.get_total_chars()}")
            time.sleep(0.5)
    except FileNotFoundError:
        print("File is not exist.. Please check AGAIN")
if __name__ == "__main__":
lowercase(file_path)
| {
"repo_id": "geekcomputers/Python",
"file_path": "file_handle/File handle text/question 5.py",
"license": "MIT License",
"lines": 29,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:file_handle/File handle text/question 6.py | """Write a function in python to count the number of lowercase
alphabets present in a text file “happy.txt”"""
from counter import Counter
def lowercase():
    """Report the lower-case, upper-case, and total letter counts of happy.txt."""
    with open("happy.txt") as F:
        word_counter = Counter(F.read())
        print(
            f"The total number of lower case letters are {word_counter.get_total_lower()}"
        )
        print(
            f"The total number of upper case letters are {word_counter.get_total_upper()}"
        )
        # Bug fix: Counter defines get_total_chars(), not get_total();
        # the original call raised AttributeError.
        print(f"The total number of letters are {word_counter.get_total_chars()}")
if __name__ == "__main__":
lowercase()
| {
"repo_id": "geekcomputers/Python",
"file_path": "file_handle/File handle text/question 6.py",
"license": "MIT License",
"lines": 15,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:file_handle/File handle text/question3.py | """Write a user-defined function named count() that will read
the contents of text file named “happy.txt” and count
the number of lines which starts with either “I‟ or “M‟."""
import os
import time
file_name = input("Enter the file name to create:- ")
# step1:
print(file_name)
def write_to_file(file_name):
    """Append user-supplied lines to file_name, unless the file already exists."""
    if os.path.exists(file_name):
        print(f"Error: {file_name} already exists.")
        return
    with open(file_name, "a") as handle:
        keep_going = True
        while keep_going:
            handle.write(f"{input('enter any text')}\n")
            keep_going = input("do you want to enter more, y/n").lower() != "n"
# step2:
def check_first_letter():
    """Count the lines of file_name that start with 'I' or 'M' (case-insensitive).

    Bug fix: the original used F.read().split(), which splits on ALL
    whitespace and therefore counted WORDS starting with I/M, not lines as
    the exercise requires. splitlines() restores line semantics, and empty
    lines are skipped so line[0] cannot raise IndexError.
    """
    with open(file_name) as F:
        lines = F.read().splitlines()
        # store all starting letters from each line in one string after converting to lower case
        first_letters = "".join(line[0].lower() for line in lines if line)
        count_i = first_letters.count("i")
        count_m = first_letters.count("m")
        print(
            f"The total number of sentences starting with I or M are {count_i + count_m}"
        )
if __name__ == "__main__":
write_to_file(file_name)
time.sleep(1)
check_first_letter()
| {
"repo_id": "geekcomputers/Python",
"file_path": "file_handle/File handle text/question3.py",
"license": "MIT License",
"lines": 33,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:file_handle/File handle text/special symbol after word.py | with open("happy.txt", "r") as F:
# method 1
for i in F.read().split():
print(i, "*", end="")
print("\n")
# method 2
F.seek(0)
for line in F.readlines():
for word in line.split():
print(word, "*", end="")
| {
"repo_id": "geekcomputers/Python",
"file_path": "file_handle/File handle text/special symbol after word.py",
"license": "MIT License",
"lines": 10,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:file_handle/File handle binary/update2.py | import pickle
import os
from dotenv import load_dotenv
base = os.path.dirname(__file__)
load_dotenv(os.path.join(base, ".env"))
student_record = os.getenv("STUDENTS_RECORD_FILE")
def update():
    """Rename the student whose roll number the user enters, then rewrite the file."""
    with open(student_record, "rb") as F:
        S = pickle.load(F)
        found = False
        rno = int(input("enter the roll number you want to update"))
        for i in S:
            if rno == i[0]:
                # Bug fix: typo "currrent" corrected in the user-facing message.
                print(f"the current name is {i[1]}")
                i[1] = input("enter the new name")
                found = True
                break
        # Bug fix: the original condition was inverted — it printed
        # "Record not found" exactly when the record WAS found.
        if not found:
            print("Record not found")
    with open(student_record, "wb") as F:
        pickle.dump(S, F)
update()
| {
"repo_id": "geekcomputers/Python",
"file_path": "file_handle/File handle binary/update2.py",
"license": "MIT License",
"lines": 22,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:Quizzler Using Tkinter and Trivia DB API/data_dynamic.py |
'''
This file is responsible for fetching quiz questions from the Open Trivia Database API.
'''
import requests
# Query parameters for the Open Trivia DB request.
# NOTE(review): category 18 is presumably a specific trivia category —
# confirm against the API's category list.
parameters = {
    "amount": 10,
    "type": "multiple",
    "category": 18
}
# Human-readable failure text; empty string means the fetch succeeded.
# Presumably read by the quiz UI — confirm against the caller.
error_message = ""
try:
    # timeout=10 keeps the app from hanging on a dead connection.
    response = requests.get(url="https://opentdb.com/api.php", params=parameters, timeout=10)
    response.raise_for_status()  # Raise an exception for HTTP errors
    question_data = response.json()["results"]
    print("Questions loaded successfully from the API.")
# The specific ConnectionError/Timeout cases must stay before the generic
# RequestException base class, or they would never be reached.
except requests.exceptions.ConnectionError:
    error_message = "Network connection is poor. Please check your internet connection."
    question_data = []
except requests.exceptions.Timeout:
    error_message = "Request timed out. Internet speed might be too slow."
    question_data = []
except requests.exceptions.RequestException as e:
    error_message = f"An error occurred: {e}"
    question_data = []
| {
"repo_id": "geekcomputers/Python",
"file_path": "Quizzler Using Tkinter and Trivia DB API/data_dynamic.py",
"license": "MIT License",
"lines": 24,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:Quizzler Using Tkinter and Trivia DB API/data_static.py | question_data = [
{
"question": "What is one of the main impacts of progress in hardware technologies on software?",
"correct_answer": "Need for more sophisticated programs",
"incorrect_answers": [
"Increase in hardware prices",
"Decrease in computational power",
"Less complex problems for software engineers"
]
},
{
"question": "How have software engineers coped with the challenges of increasing computational capabilities?",
"correct_answer": "By innovating and building on past experiences",
"incorrect_answers": [
"By reducing programming efforts",
"By simplifying programming languages",
"By avoiding large and complex problems"
]
},
{
"question": "Which of the following is a definition of software engineering according to IEEE?",
"correct_answer": "The application of systematic, disciplined, quantifiable approach to software development, operation, and maintenance",
"incorrect_answers": [
"The art of writing computer programs",
"An engineering approach to developing software",
"A collection of unorganized programming techniques"
]
},
{
"question": "Why is software engineering similar to other engineering disciplines?",
"correct_answer": "It uses well-understood and well-documented principles",
"incorrect_answers": [
"It makes use of subjective judgement and ill understood principles",
"It often avoids conflicting goals",
"It relies solely on qualitative attributes"
]
},
{
"question": "Which statement supports the idea that software engineering is not just an art?",
"correct_answer": "It organizes experiences and provides theoretical bases to experimental observations",
"incorrect_answers": [
"It makes subjective judgement based on qualitative attributes",
"It avoids systematic and disciplined approaches",
"It does not require tradeoffs in problem solving"
]
},
{
"question": "How have software engineering principles evolved over the last sixty years?",
"correct_answer": "From an art form to an engineering discipline",
"incorrect_answers": [
"From a science to an art form",
"From a craft to an art form",
"From an engineering discipline to a craft"
]
},
{
"question": "Which programming style is characterized by quickly developing a program without any specification, plan, or design?",
"correct_answer": "Build and fix",
"incorrect_answers": [
"Exploratory",
"Code and fix",
"Ad hoc"
]
},
{
"question": "According to the text, what has been a symptom of the present software crisis?",
"correct_answer": "Increasing software costs compared to hardware",
"incorrect_answers": [
"Decrease in software development costs",
"Software products becoming easier to alter and debug",
"Software products being delivered on time"
]
},
{
"question": "What is one of the main benefits of adopting software engineering techniques according to the text?",
"correct_answer": "Developing high quality software cost effectively and timely",
"incorrect_answers": [
"Increasing hardware costs",
"Avoiding the use of scientific principles",
"Making software development more subjective"
]
},
{
"question": "What is a key characteristic of toy software?",
"correct_answer": "Lack good user interface and proper documentation",
"incorrect_answers": [
"Developed by a team of professionals",
"Large in size and highly complex",
"Thoroughly tested and maintained"
]
}
# {
# "question": "What differentiates professional software from toy software?",
# "correct_answer": "Professional software is systematically designed, carefully implemented, and thoroughly tested",
# "incorrect_answers": [
# "Professional software is usually developed by a single individual",
# "Professional software lacks supporting documents",
# "Professional software is used by a single user"
# ]
# },
# {
# "question": "What is a key feature of software services projects?",
# "correct_answer": "They often involve the development of customized software",
# "incorrect_answers": [
# "They are always largescale projects",
# "They involve the development of off-the-shelf software",
# "They are never outsourced to other companies"
# ]
# },
# {
# "question": "Why might a company choose to outsource part of its software development work?",
# "correct_answer": "To develop some parts cost effectively or to use external expertise",
# "incorrect_answers": [
# "To ensure all development work is done internally",
# "Because it has more expertise than the outsourcing company",
# "To avoid completing the project on time"
# ]
# },
# {
# "question": "What type of software is typically developed in a short time frame and at a low cost?",
# "correct_answer": "Toy software",
# "incorrect_answers": [
# "Generic software products",
# "Customized software",
# "Professional software"
# ]
# },
# {
# "question": "What has been a traditional focus of Indian software companies?",
# "correct_answer": "Executing software services projects",
# "incorrect_answers": [
# "Developing largescale generic software products",
# "Avoiding any type of software development",
# "Developing only toy software"
# ]
# },
# {
# "question": "What is the primary characteristic of the exploratory style of software development?",
# "correct_answer": "Complete freedom for the programmer to choose development activities",
# "incorrect_answers": [
# "Strict adherence to development rules and guidelines",
# "Development of software based on detailed specifications",
# "Use of structured and well-documented procedures"
# ]
# },
# {
# "question": "What typically initiates the coding process in the exploratory development style?",
# "correct_answer": "Initial customer briefing about requirements",
# "incorrect_answers": [
# "Completion of a detailed design document",
# "Formal approval from a project manager",
# "Completion of a feasibility study"
# ]
# },
# {
# "question": "What is a major limitation of the exploratory development style for large sized software projects?",
# "correct_answer": "Development time and effort grow exponentially with problem size",
# "incorrect_answers": [
# "Requires a large team of developers",
# "Results in highly structured and high quality code",
# "Easily allows for concurrent work among multiple developers"
# ]
# },
# {
# "question": "What difficulty arises when using the exploratory style in a team development environment?",
# "correct_answer": "Difficulty in partitioning work among developers due to lack of proper design and documentation",
# "incorrect_answers": [
# "Easy partitioning of work among developers",
# "Development work is based on a detailed design",
# "Use of structured and well documented code"
# ]
# },
# {
# "question": "In what scenario can the exploratory development style be successful?",
# "correct_answer": "Developing very small programs",
# "incorrect_answers": [
# "Developing largescale enterprise software",
# "Implementing critical safety systems",
# "Managing large, distributed teams"
# ]
# },
# {
# "question": "What was the primary programming style used in the 1950s?",
# "correct_answer": "Build and fix (exploratory programming) style",
# "incorrect_answers": [
# "Object-oriented programming",
# "Control flow-based design",
# "Data flow-oriented design"
# ]
# }
] | {
"repo_id": "geekcomputers/Python",
"file_path": "Quizzler Using Tkinter and Trivia DB API/data_static.py",
"license": "MIT License",
"lines": 191,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:Quizzler Using Tkinter and Trivia DB API/main.py |
"""This file processes the fetched questions and prepares them for use in the quiz."""
from question_model import Question
from data_dynamic import question_data
from quiz_brain import QuizBrain
from ui import QuizInterface
# question_bank = []
# question_text = question["question"]
# question_answer = question["correct_answer"]
# question_options = question["incorrect_answers"] + [question["correct_answer"]]
# new_question = Question(question_text, question_answer, question_options)
# question_bank.append(new_question)
# Build one Question per fetched trivia entry; its options are the
# wrong answers plus the correct one.
question_bank = []
for entry in question_data:
    choices = entry["incorrect_answers"] + [entry["correct_answer"]]
    question_bank.append(Question(entry["question"], entry["correct_answer"], choices))

quiz = QuizBrain(question_bank)
quiz_ui = QuizInterface(quiz)
| {
"repo_id": "geekcomputers/Python",
"file_path": "Quizzler Using Tkinter and Trivia DB API/main.py",
"license": "MIT License",
"lines": 22,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:Quizzler Using Tkinter and Trivia DB API/question_model.py | class Question:
def __init__(self, q_text, q_answer, q_options):
self.text = q_text
self.answer = q_answer
self.options = q_options
| {
"repo_id": "geekcomputers/Python",
"file_path": "Quizzler Using Tkinter and Trivia DB API/question_model.py",
"license": "MIT License",
"lines": 5,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:Quizzler Using Tkinter and Trivia DB API/quiz_brain.py |
"""This file contains the logic that drives the quiz game, including managing the current question, checking answers, and tracking the score."""
import html
class QuizBrain:
    """Drives the quiz: serves questions in order, checks answers, keeps score."""

    def __init__(self, q_list):
        """Hold the question list and reset the progress counters."""
        self.question_number = 0
        self.score = 0
        self.question_list = q_list
        self.current_question = None

    def still_has_questions(self):
        """Return True while unserved questions remain."""
        return self.question_number < len(self.question_list)

    def next_question(self):
        """Advance to the next question and return its numbered display text."""
        self.current_question = self.question_list[self.question_number]
        self.question_number += 1
        # The trivia API text may contain HTML entities; decode for display.
        unescaped = html.unescape(self.current_question.text)
        return f"Q.{self.question_number}: {unescaped}"

    def check_answer(self, user_answer):
        """Return True when user_answer matches the current answer (case-insensitive)."""
        return user_answer.lower() == self.current_question.answer.lower()
| {
"repo_id": "geekcomputers/Python",
"file_path": "Quizzler Using Tkinter and Trivia DB API/quiz_brain.py",
"license": "MIT License",
"lines": 18,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:Quizzler Using Tkinter and Trivia DB API/ui.py |
"""This file manages the graphical user interface of the quiz, using Tkinter to display questions, answer options, and the score to the user."""
from tkinter import *
from quiz_brain import QuizBrain
from data_dynamic import error_message
# Normal screen
BACKGROUND = "#608BC1"
CANVAS = "#CBDCEB"
TEXT = "#133E87"
# If answer is right
R_BACKGROUND = "#859F3D"
R_CANVAS = "#F6FCDF"
R_TEXT = "#31511E"
# If answer is wrong
W_BACKGROUND = "#C63C51"
W_CANVAS = "#D95F59"
W_TEXT = "#522258"
FONT = ("Lucida sans", 20)
class QuizInterface:
    """Tkinter front end for the quiz.

    Renders the current question on a canvas, four radio-button options,
    a submit button, and a running score. Colors flip to a "right" or
    "wrong" palette for one second after each submission.
    """

    def __init__(self, quiz_brain: QuizBrain):
        """Build the window and either start the quiz or show a fetch error."""
        self.quiz = quiz_brain
        self.window = Tk()
        self.window.title("Quizzler")
        self.window.config(padx=20, pady=20, bg=BACKGROUND)
        self.score_label = Label(text="Score: 0", fg="white", bg=BACKGROUND, font=("Lucida sans", 15, "bold"))
        self.score_label.grid(row=0, column=1)
        self.canvas = Canvas(width=1000, height=550, bg=CANVAS)
        self.question_text = self.canvas.create_text(
            500, 100, width=800, text="Some question text", fill=TEXT, font=FONT, anchor="center", justify="center"
        )
        self.canvas.grid(row=1, column=0, columnspan=2, pady=50)
        self.opt_selected = IntVar()
        self.options = self.create_radio_buttons()
        self.submit_button = Button(
            text="Submit", command=self.submit_answer, fg=TEXT, font=FONT
        )
        self.submit_button.grid(row=3, column=0, columnspan=2)
        # If the API fetch failed, show the error instead of starting the quiz.
        if error_message:
            self.display_error_message(error_message)
        else:
            self.get_next_question()
        self.window.mainloop()

    def create_radio_buttons(self):
        """Create and place the four answer radio buttons; return them as a list."""
        radio_buttons = []
        y_position = 230
        for i in range(4):
            radio_button = Radiobutton(
                self.canvas, text="", variable=self.opt_selected, value=i + 1, font=FONT, bg=CANVAS, anchor="w",
                justify="left", fg=TEXT, wraplength=900
            )
            radio_buttons.append(radio_button)
            self.canvas.create_window(50, y_position, window=radio_button, anchor="w")
            y_position += 65
        return radio_buttons

    def get_next_question(self):
        """Show the next question (restoring the neutral palette), or the final result."""
        if self.quiz.still_has_questions():
            self.opt_selected.set(0)  # Reset selection
            q_text = self.quiz.next_question()
            self.canvas.itemconfig(self.question_text, text=q_text)
            self.canvas.config(bg=CANVAS)
            self.window.config(bg=BACKGROUND)
            for option in self.options:
                option.config(bg=CANVAS, fg=TEXT)
            self.display_options()
            self.score_label.config(bg=BACKGROUND, text=f"Score: {self.quiz.score}")
            self.canvas.itemconfig(self.question_text, fill=TEXT)
        else:
            self.display_result()

    def display_options(self):
        """Copy the current question's answer choices into the radio buttons."""
        current_options = self.quiz.current_question.options
        for i, option in enumerate(current_options):
            self.options[i].config(text=option)

    def submit_answer(self):
        """Grade the selected option and flash right/wrong colors for one second."""
        selected_option_index = self.opt_selected.get() - 1
        if selected_option_index >= 0:
            user_answer = self.quiz.current_question.options[selected_option_index]
            # BUG FIX: check_answer() was previously called twice (the first
            # result was discarded); call it once and reuse the result.
            is_right = self.quiz.check_answer(user_answer)
            if is_right:
                self.quiz.score += 1
                self.canvas.config(bg=R_CANVAS)
                self.window.config(bg=R_BACKGROUND)
                for option in self.options:
                    option.config(bg=R_CANVAS, fg=R_TEXT)
                self.canvas.itemconfig(self.question_text, fill=R_TEXT)
                self.score_label.config(bg=R_BACKGROUND)
            else:
                self.canvas.config(bg=W_CANVAS)
                self.window.config(bg=W_BACKGROUND)
                for option in self.options:
                    option.config(bg=W_CANVAS, fg=W_TEXT)
                self.canvas.itemconfig(self.question_text, fill=W_TEXT)
                self.score_label.config(bg=W_BACKGROUND)
            self.window.after(1000, self.get_next_question)

    def display_result(self):
        """Destroy the options and show a final message keyed to the score."""
        for option in self.options:
            option.config(bg=CANVAS, fg=TEXT)
            option.destroy()
        if self.quiz.score <= 3:
            self.result_text = f"You've completed the quiz!\nYour final score: {self.quiz.score}/{self.quiz.question_number}\nBetter luck next time! Keep practicing!"
        elif self.quiz.score <= 6:
            self.result_text = f"You've completed the quiz!\nYour final score: {self.quiz.score}/{self.quiz.question_number}\nGood job! You're getting better!"
        elif self.quiz.score <= 8:
            self.result_text = f"You've completed the quiz!\nYour final score: {self.quiz.score}/{self.quiz.question_number}\nGreat work! You're almost there!"
        else:
            self.result_text = f"You've completed the quiz!\nYour final score: {self.quiz.score}/{self.quiz.question_number}\nExcellent! You're a Quiz Master!"
        self.score_label.config(bg=BACKGROUND, text=f"Score: {self.quiz.score}")
        self.canvas.config(bg=CANVAS)
        self.window.config(bg=BACKGROUND)
        self.canvas.itemconfig(self.question_text, fill=TEXT)
        self.score_label.config(bg=BACKGROUND)
        self.canvas.itemconfig(self.question_text, text=self.result_text)
        self.canvas.coords(self.question_text, 500, 225)  # Centered position
        self.submit_button.config(state="disabled")

    def display_error_message(self, message):
        """Replace the quiz UI with an error message (e.g. API fetch failure)."""
        for option in self.options:
            option.config(bg=CANVAS, fg=TEXT)
            option.destroy()
        self.canvas.itemconfig(self.question_text, text=message)
        self.canvas.coords(self.question_text, 500, 225)  # Centered position
        self.submit_button.config(state="disabled")
| {
"repo_id": "geekcomputers/Python",
"file_path": "Quizzler Using Tkinter and Trivia DB API/ui.py",
"license": "MIT License",
"lines": 119,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
geekcomputers/Python:Snake Game Using Turtle/colors.py | """
This file contains the color palette for the game, now including
colors for the new interactive buttons.
"""
# A fresh and vibrant color theme
# --> food.py
FOOD_COLOR = "#C70039" # A bright, contrasting red
# --> main.py
BG_COLOR = '#F0F8FF' # AliceBlue, a very light and clean background
# --> scoreboard.py
GAME_OVER_COLOR = '#D21312' # Strong red for game over message
SCORE_COLOR = '#27374D' # Dark blue for high-contrast text
MESSAGE_COLOR = '#27374D' # Consistent dark blue for other messages
# --> snake.py
FIRST_SEGMENT_COLOR = '#006400' # DarkGreen for the snake's head
BODY_COLOR = '#2E8B57' # SeaGreen for the snake's body
# --> wall.py
WALL_COLOR = '#27374D' # Dark blue for a solid, visible border
# --> UI Controls (Buttons)
BUTTON_BG_COLOR = "#526D82"
BUTTON_TEXT_COLOR = "#F0F8FF"
BUTTON_BORDER_COLOR = "#27374D"
| {
"repo_id": "geekcomputers/Python",
"file_path": "Snake Game Using Turtle/colors.py",
"license": "MIT License",
"lines": 22,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:Snake Game Using Turtle/food.py | """
This file handles the creation of food. Its placement is now controlled
by the main game logic to ensure it spawns within the correct boundaries.
"""
from turtle import Turtle
import random
import colors
class Food(Turtle):
    """A small circular turtle that serves as the snake's food."""

    def __init__(self):
        """Configure the food's look; placement happens later via refresh()."""
        super().__init__()
        self.shape("circle")
        self.penup()
        self.shapesize(stretch_len=0.7, stretch_wid=0.7)
        self.color(colors.FOOD_COLOR)
        self.speed("fastest")

    def refresh(self, left_wall, right_wall, bottom_wall, top_wall):
        """Moves the food to a new random position within the provided game boundaries."""
        # Keep a margin so food never spawns flush against a wall.
        margin = 20
        x_bounds = (int(left_wall) + margin, int(right_wall) - margin)
        y_bounds = (int(bottom_wall) + margin, int(top_wall) - margin)
        self.goto(random.randint(*x_bounds), random.randint(*y_bounds))
| {
"repo_id": "geekcomputers/Python",
"file_path": "Snake Game Using Turtle/food.py",
"license": "MIT License",
"lines": 23,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:Snake Game Using Turtle/main.py | """
This is the main file that runs the Snake game.
It handles screen setup, dynamic boundaries, UI controls (buttons),
game state management, and the main game loop.
"""
from turtle import Screen, Turtle
from snake import Snake
from food import Food
from scoreboard import Scoreboard
from wall import Wall
import colors
# --- CONSTANTS ---
MOVE_DELAY_MS = 100 # Game speed in milliseconds
# --- GAME STATE ---
game_state = "start" # Possible states: "start", "playing", "paused", "game_over"
# --- SCREEN SETUP ---
screen = Screen()
screen.setup(width=0.9, height=0.9) # Set up a nearly fullscreen window
screen.bgcolor(colors.BG_COLOR)
screen.title("Interactive Snake Game")
screen.tracer(0)
# --- DYNAMIC GAME BOUNDARIES ---
WIDTH = screen.window_width()
HEIGHT = screen.window_height()
# These boundaries are calculated to be inside the visible wall with a safe margin
LEFT_WALL = -WIDTH / 2 + 25
RIGHT_WALL = WIDTH / 2 - 25
TOP_WALL = HEIGHT / 2 - 85
BOTTOM_WALL = -HEIGHT / 2 + 25
# --- GAME OBJECTS ---
wall = Wall()
snake = Snake()
food = Food()
# Initial food placement is now handled after boundaries are calculated
food.refresh(LEFT_WALL, RIGHT_WALL, BOTTOM_WALL, TOP_WALL)
scoreboard = Scoreboard()
# --- UI CONTROLS (BUTTONS) ---
buttons = {} # Dictionary to hold button turtles and their properties
def create_button(name, x, y, width=120, height=40):
"""Creates a turtle-based button with a label."""
if name in buttons and buttons[name]['turtle'] is not None:
buttons[name]['turtle'].clear()
button_turtle = Turtle()
button_turtle.hideturtle()
button_turtle.penup()
button_turtle.speed("fastest")
button_turtle.goto(x - width/2, y - height/2)
button_turtle.color(colors.BUTTON_BORDER_COLOR, colors.BUTTON_BG_COLOR)
button_turtle.begin_fill()
for _ in range(2):
button_turtle.forward(width)
button_turtle.left(90)
button_turtle.forward(height)
button_turtle.left(90)
button_turtle.end_fill()
button_turtle.goto(x, y - 12)
button_turtle.color(colors.BUTTON_TEXT_COLOR)
button_turtle.write(name, align="center", font=("Lucida Sans", 14, "bold"))
buttons[name] = {'turtle': button_turtle, 'x': x, 'y': y, 'w': width, 'h': height, 'visible': True}
def hide_button(name):
"""Hides a button by clearing its turtle."""
if name in buttons and buttons[name]['visible']:
buttons[name]['turtle'].clear()
buttons[name]['visible'] = False
def manage_buttons():
"""Shows or hides buttons based on the current game state."""
all_buttons = ["Play", "Pause", "Resume", "Restart"]
for btn_name in all_buttons:
hide_button(btn_name)
btn_x = WIDTH / 2 - 100
btn_y = HEIGHT / 2 - 45
if game_state == "start":
create_button("Play", 0, -100)
elif game_state == "playing":
create_button("Pause", btn_x, btn_y)
elif game_state == "paused":
create_button("Resume", btn_x, btn_y)
elif game_state == "game_over":
create_button("Restart", btn_x, btn_y)
# --- GAME LOGIC & STATE TRANSITIONS ---
def start_game():
global game_state
if game_state == "start":
game_state = "playing"
scoreboard.update_scoreboard()
def toggle_pause_resume():
global game_state
if game_state == "playing":
game_state = "paused"
scoreboard.display_pause()
elif game_state == "paused":
game_state = "playing"
scoreboard.update_scoreboard()
def restart_game():
global game_state
if game_state == "game_over":
game_state = "playing"
snake.reset()
food.refresh(LEFT_WALL, RIGHT_WALL, BOTTOM_WALL, TOP_WALL)
scoreboard.reset()
def is_click_on_button(name, x, y):
    """Checks if a click (x, y) is within the bounds of a visible button."""
    btn = buttons.get(name)
    if not btn or not btn['visible']:
        return False
    half_w, half_h = btn['w'] / 2, btn['h'] / 2
    inside_x = btn['x'] - half_w < x < btn['x'] + half_w
    inside_y = btn['y'] - half_h < y < btn['y'] + half_h
    return inside_x and inside_y
def handle_click(x, y):
    """Main click handler: routes a click to the action for the current game state."""
    # Each state shows exactly one button; map state -> (button name, action).
    routes = {
        "start": ("Play", start_game),
        "playing": ("Pause", toggle_pause_resume),
        "paused": ("Resume", toggle_pause_resume),
        "game_over": ("Restart", restart_game),
    }
    route = routes.get(game_state)
    if route and is_click_on_button(route[0], x, y):
        route[1]()
# --- KEYBOARD HANDLERS ---
def handle_snake_up():
if game_state in ["start", "playing"]:
start_game()
snake.up()
def handle_snake_down():
if game_state in ["start", "playing"]:
start_game()
snake.down()
def handle_snake_left():
if game_state in ["start", "playing"]:
start_game()
snake.left()
def handle_snake_right():
if game_state in ["start", "playing"]:
start_game()
snake.right()
# --- KEY & MOUSE BINDINGS ---
screen.listen()
screen.onkey(handle_snake_up, "Up")
screen.onkey(handle_snake_down, "Down")
screen.onkey(handle_snake_left, "Left")
screen.onkey(handle_snake_right, "Right")
screen.onkey(toggle_pause_resume, "space")
screen.onkey(restart_game, "r")
screen.onkey(restart_game, "R")
screen.onclick(handle_click)
# --- MAIN GAME LOOP ---
def game_loop():
    """Advance the game one tick: move, detect collisions, redraw, reschedule."""
    global game_state
    if game_state == "playing":
        snake.move()

        # Collision with food: relocate food, grow snake, bump score.
        if snake.head.distance(food) < 20:
            food.refresh(LEFT_WALL, RIGHT_WALL, BOTTOM_WALL, TOP_WALL)
            snake.extend()
            scoreboard.increase_score()

        # Collision with wall ends the game.
        if not (LEFT_WALL < snake.head.xcor() < RIGHT_WALL and BOTTOM_WALL < snake.head.ycor() < TOP_WALL):
            game_state = "game_over"
            scoreboard.game_over()

        # Collision with tail. BUG FIX: previously this loop could call
        # scoreboard.game_over() several times in one tick (once per close
        # segment, and again after a wall collision had already ended the
        # game), overwriting the game-over message repeatedly. Skip it once
        # the game has ended and stop at the first hit.
        if game_state == "playing":
            for segment in snake.segments[1:]:
                if snake.head.distance(segment) < 10:
                    game_state = "game_over"
                    scoreboard.game_over()
                    break

    manage_buttons()
    screen.update()
    screen.ontimer(game_loop, MOVE_DELAY_MS)
# --- INITIALIZE GAME ---
scoreboard.display_start_message()
game_loop()
screen.exitonclick()
| {
"repo_id": "geekcomputers/Python",
"file_path": "Snake Game Using Turtle/main.py",
"license": "MIT License",
"lines": 170,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
geekcomputers/Python:Snake Game Using Turtle/scoreboard.py | """
This file manages the display of the score, high score, and game messages.
It now positions the score dynamically in the top-left corner.
"""
from turtle import Turtle, Screen
import colors
# Constants for styling and alignment
ALIGNMENT = "left"
SCORE_FONT = ("Lucida Sans", 20, "bold")
MESSAGE_FONT = ("Courier", 40, "bold")
INSTRUCTION_FONT = ("Lucida Sans", 16, "normal")
class Scoreboard(Turtle):
    """ This class maintains the scoreboard, high score, and game messages. """

    def __init__(self):
        super().__init__()
        self.screen = Screen()  # Needed to place the score relative to the window
        self.score = 0
        self.high_score = self.load_high_score()
        self.penup()
        self.hideturtle()
        self.update_scoreboard()

    def load_high_score(self):
        """Loads high score from highscore.txt. Returns 0 if not found."""
        try:
            with open("highscore.txt", mode="r") as file:
                return int(file.read())
        except (FileNotFoundError, ValueError):
            return 0

    def update_scoreboard(self):
        """Clears and rewrites the score and high score in the top-left corner."""
        self.clear()
        self.color(colors.SCORE_COLOR)
        # Anchor the text relative to the window's top-left corner.
        self.goto(-self.screen.window_width() / 2 + 30, self.screen.window_height() / 2 - 60)
        self.write(f"Score: {self.score} | High Score: {self.high_score}", align=ALIGNMENT, font=SCORE_FONT)

    def increase_score(self):
        """Increases score and updates the display."""
        self.score += 1
        self.update_scoreboard()

    def reset(self):
        """Checks for new high score, saves it, and resets the score."""
        if self.score > self.high_score:
            self.high_score = self.score
            with open("highscore.txt", mode="w") as file:
                file.write(str(self.high_score))
        self.score = 0
        self.update_scoreboard()

    def _show_message(self, tint, headline, hint):
        """Write a large headline at (0, 40) and a smaller hint line at (0, -40)."""
        self.goto(0, 40)
        self.color(tint)
        self.write(headline, align="center", font=MESSAGE_FONT)
        self.goto(0, -40)
        self.write(hint, align="center", font=INSTRUCTION_FONT)

    def game_over(self):
        """Displays the Game Over message and instructions."""
        self._show_message(colors.GAME_OVER_COLOR, "GAME OVER", "Click 'Restart' or Press 'R'")

    def display_pause(self):
        """Displays the PAUSED message."""
        self._show_message(colors.MESSAGE_COLOR, "PAUSED", "Click 'Resume' or Press 'Space'")

    def display_start_message(self):
        """Displays the welcome message and starting instructions."""
        self._show_message(colors.MESSAGE_COLOR, "SNAKE GAME", "Click 'Play' or an Arrow Key to Start")
| {
"repo_id": "geekcomputers/Python",
"file_path": "Snake Game Using Turtle/scoreboard.py",
"license": "MIT License",
"lines": 70,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:Snake Game Using Turtle/snake.py | """
This file is responsible for creating the snake and managing its movement,
extension, and reset functionality.
"""
from turtle import Turtle
import colors
STARTING_POSITIONS = [(0, 0), (-20, 0), (-40, 0)]
MOVE_DISTANCE = 20
UP, DOWN, LEFT, RIGHT = 90, 270, 180, 0
class Snake:
    """ This class creates a snake body and contains methods for movement and extension. """

    def __init__(self):
        self.segments = []
        self.create_snake()
        self.head = self.segments[0]

    def create_snake(self):
        """ Creates the initial snake body. """
        for position in STARTING_POSITIONS:
            self.add_segment(position)
        self.segments[0].color(colors.FIRST_SEGMENT_COLOR)

    def add_segment(self, position):
        """ Adds a new segment to the snake. """
        segment = Turtle(shape="square")
        segment.penup()
        segment.goto(position)
        segment.color(colors.BODY_COLOR)
        self.segments.append(segment)

    def extend(self):
        """ Adds a new segment to the snake's tail. """
        self.add_segment(self.segments[-1].position())
        self.segments[0].color(colors.FIRST_SEGMENT_COLOR)

    def move(self):
        """ Moves the snake forward; each segment steps into the spot of the one ahead. """
        # Walk tail-to-head so every follower copies its leader's old position
        # before the leader itself moves.
        for follower, leader in zip(reversed(self.segments), self.segments[-2::-1]):
            follower.goto(leader.xcor(), leader.ycor())
        self.head.forward(MOVE_DISTANCE)

    def reset(self):
        """Hides the old snake and creates a new one for restarting the game."""
        for segment in self.segments:
            segment.hideturtle()
        self.segments.clear()
        self.create_snake()
        self.head = self.segments[0]

    def up(self):
        """Turns the snake's head upwards, preventing it from reversing."""
        if self.head.heading() != DOWN:
            self.head.setheading(UP)

    def down(self):
        """Turns the snake's head downwards, preventing it from reversing."""
        if self.head.heading() != UP:
            self.head.setheading(DOWN)

    def left(self):
        """Turns the snake's head to the left, preventing it from reversing."""
        if self.head.heading() != RIGHT:
            self.head.setheading(LEFT)

    def right(self):
        """Turns the snake's head to the right, preventing it from reversing."""
        if self.head.heading() != LEFT:
            self.head.setheading(RIGHT)
| {
"repo_id": "geekcomputers/Python",
"file_path": "Snake Game Using Turtle/snake.py",
"license": "MIT License",
"lines": 61,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:Snake Game Using Turtle/wall.py | """This file creates a responsive boundary wall that adapts to the game window size."""
from turtle import Turtle, Screen
import colors
class Wall:
    """ This class creates a wall around the game screen that adjusts to its dimensions. """

    def __init__(self):
        self.screen = Screen()
        self.create_wall()

    def create_wall(self):
        """Draws a responsive game border and a header area for the scoreboard and controls."""
        half_w = self.screen.window_width() / 2
        half_h = self.screen.window_height() / 2

        pen = Turtle()
        pen.hideturtle()
        pen.speed("fastest")
        pen.color(colors.WALL_COLOR)
        pen.penup()

        # Main rectangular border, inset 10px from every screen edge,
        # drawn clockwise starting from the top-left corner.
        pen.goto(-half_w + 10, half_h - 10)
        pen.pendown()
        pen.pensize(10)
        for corner in [
            (half_w - 10, half_h - 10),
            (half_w - 10, -half_h + 10),
            (-half_w + 10, -half_h + 10),
            (-half_w + 10, half_h - 10),
        ]:
            pen.goto(corner)

        # Divider separating the header (score/buttons) from the play area.
        pen.penup()
        pen.goto(-half_w + 10, half_h - 70)
        pen.pendown()
        pen.pensize(5)
        pen.goto(half_w - 10, half_h - 70)

        self.screen.update()
| {
"repo_id": "geekcomputers/Python",
"file_path": "Snake Game Using Turtle/wall.py",
"license": "MIT License",
"lines": 37,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:Password Manager Using Tkinter/PGV.py | import json
# Collect the new entry from the form widgets (defined in the UI module).
new_data = {
    website_input.get(): {
        "email": email_input.get(),
        "password": passw_input.get()
    }
}

# BUG FIX: the previous version only wrote when data.json already existed,
# and then overwrote the entire file with just the new entry, losing all
# previously saved credentials. Load whatever is on disk (treating a
# missing or corrupt file as empty), merge the new entry, and save.
try:
    with open("data.json", "r") as data_file:
        data = json.load(data_file)
except (FileNotFoundError, json.JSONDecodeError):
    data = {}

data.update(new_data)
with open("data.json", "w") as data_file:
    json.dump(data, data_file, indent=4)
"repo_id": "geekcomputers/Python",
"file_path": "Password Manager Using Tkinter/PGV.py",
"license": "MIT License",
"lines": 16,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:Password Manager Using Tkinter/main.py | import tkinter as tk
from tkinter import messagebox, simpledialog
import ttkbootstrap as ttk
from ttkbootstrap.constants import *
import pyperclip
import json
from random import choice, randint, shuffle
# ---------------------------- CONSTANTS ------------------------------- #
FONT_NAME = "Helvetica"
# IMP: this is not a secure way to store a master password.
# in a real application, this should be changed and stored securely (e.g., hashed and salted).
MASTER_PASSWORD = "password123"
# ---------------------------- PASSWORD GENERATOR ------------------------------- #
def generate_password():
    """Generates a random strong password, fills the entry, and copies it to the clipboard.

    SECURITY FIX: password material now comes from secrets.SystemRandom
    (OS-backed CSPRNG) instead of the module-level `random` functions,
    which use a predictable Mersenne Twister. Character pools and length
    distribution (8-10 letters, 2-4 symbols, 2-4 digits) are unchanged.
    """
    from secrets import SystemRandom
    rng = SystemRandom()

    # Same pools as before, expressed as strings instead of char lists.
    letters = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
    numbers = "0123456789"
    symbols = "!#$%&()*+"

    password_chars = (
        [rng.choice(letters) for _ in range(rng.randint(8, 10))]
        + [rng.choice(symbols) for _ in range(rng.randint(2, 4))]
        + [rng.choice(numbers) for _ in range(rng.randint(2, 4))]
    )
    rng.shuffle(password_chars)
    password = "".join(password_chars)

    password_entry.delete(0, tk.END)
    password_entry.insert(0, password)
    pyperclip.copy(password)
    messagebox.showinfo(title="Password Generated", message="Password copied to clipboard!")
# ---------------------------- SAVE PASSWORD ------------------------------- #
def save():
    """saves the website, email, and password to a JSON file."""
    website = website_entry.get()
    email = email_entry.get()
    password = password_entry.get()

    # Require both a website and a password before doing anything else.
    if not website or not password:
        messagebox.showerror(title="Oops", message="Please don't leave any fields empty!")
        return

    new_data = {
        website: {
            "email": email,
            "password": password,
        }
    }

    is_ok = messagebox.askokcancel(title=website, message=f"These are the details entered: \nEmail: {email} "
                                                          f"\nPassword: {password} \nIs it ok to save?")
    if not is_ok:
        return

    # Load any existing entries (missing/corrupt file counts as empty),
    # merge in the new one, and write everything back.
    try:
        with open("data.json", "r") as data_file:
            data = json.load(data_file)
    except (FileNotFoundError, json.JSONDecodeError):
        data = {}
    data.update(new_data)
    with open("data.json", "w") as data_file:
        json.dump(data, data_file, indent=4)

    website_entry.delete(0, tk.END)
    password_entry.delete(0, tk.END)
# ---------------------------- FIND PASSWORD ------------------------------- #
def find_password():
    """Finds the entry for the website in data.json, shows it, and copies the password."""
    website = website_entry.get()
    try:
        with open("data.json", "r") as data_file:
            data = json.load(data_file)
    except (FileNotFoundError, json.JSONDecodeError):
        messagebox.showerror(title="Error", message="No Data File Found.")
        return

    if website not in data:
        messagebox.showerror(title="Error", message=f"No details for {website} exists.")
        return

    email = data[website]["email"]
    password = data[website]["password"]
    messagebox.showinfo(title=website, message=f"Email: {email}\nPassword: {password}")
    pyperclip.copy(password)
    # Consistency fix: f-string like the rest of the module (was str.format);
    # the rendered message text is unchanged.
    messagebox.showinfo(title="Copied", message=f"Password for {website} copied to clipboard.")
# ---------------------------- VIEW ALL PASSWORDS ------------------------------- #
def view_all_passwords():
    """prompts for master password and displays all saved passwords if correct."""
    entered = simpledialog.askstring("Master Password", "Please enter the master password:", show='*')
    if entered is None:
        return  # user clicked cancel; show no error
    if entered == MASTER_PASSWORD:
        show_passwords_window()
    else:
        messagebox.showerror("Incorrect Password", "The master password you entered is incorrect.")
def show_passwords_window():
    """Creates a modal Toplevel window listing every saved entry in a table.

    Shows a three-column Treeview (website, email, password) populated from
    data.json, plus buttons to copy the selected row's email or password to
    the clipboard. A missing or unparseable data.json yields an empty table.
    """
    all_passwords_window = tk.Toplevel(window)
    all_passwords_window.title("All Saved Passwords")
    all_passwords_window.config(padx=20, pady=20)
    # a frame for the treeview and scrollbar
    tree_frame = ttk.Frame(all_passwords_window)
    tree_frame.grid(row=0, column=0, columnspan=2, sticky='nsew')
    # a Treeview (table)
    cols = ('Website', 'Email', 'Password')
    tree = ttk.Treeview(tree_frame, columns=cols, show='headings')
    # column headings and widths
    tree.heading('Website', text='Website')
    tree.column('Website', width=150)
    tree.heading('Email', text='Email/Username')
    tree.column('Email', width=200)
    tree.heading('Password', text='Password')
    tree.column('Password', width=200)
    tree.grid(row=0, column=0, sticky='nsew')
    # a scrollbar
    scrollbar = ttk.Scrollbar(tree_frame, orient=tk.VERTICAL, command=tree.yview)
    tree.configure(yscroll=scrollbar.set)
    scrollbar.grid(row=0, column=1, sticky='ns')
    # load data from JSON file
    try:
        with open("data.json", "r") as data_file:
            data = json.load(data_file)
        # insert data into the treeview
        for website, details in data.items():
            tree.insert("", "end", values=(website, details['email'], details['password']))
    except (FileNotFoundError, json.JSONDecodeError):
        # if file not found or empty, it will just show an empty table
        pass
    def copy_selected_info(column_index, info_type):
        """Copies one field of the selected row to the clipboard.

        Args:
            column_index: Treeview column of the value to copy (1=email, 2=password).
            info_type: Human-readable field name used in the confirmation dialog.
        """
        selected_item = tree.focus()
        if not selected_item:
            messagebox.showwarning("No Selection", "Please select a row from the table first.", parent=all_passwords_window)
            return
        item_values = tree.item(selected_item, 'values')
        info_to_copy = item_values[column_index]
        pyperclip.copy(info_to_copy)
        messagebox.showinfo("Copied!", f"The {info_type.lower()} for '{item_values[0]}' has been copied to your clipboard.", parent=all_passwords_window)
    # a frame for the buttons
    button_frame = ttk.Frame(all_passwords_window)
    button_frame.grid(row=1, column=0, columnspan=2, pady=(10,0))
    copy_email_button = ttk.Button(button_frame, text="Copy Email", style="success.TButton", command=lambda: copy_selected_info(1, "Email"))
    copy_email_button.pack(side=tk.LEFT, padx=5)
    copy_password_button = ttk.Button(button_frame, text="Copy Password", style="success.TButton", command=lambda: copy_selected_info(2, "Password"))
    copy_password_button.pack(side=tk.LEFT, padx=5)
    all_passwords_window.grab_set() # makes window modal
# ---------------------------- UI SETUP ------------------------------- #
window = ttk.Window(themename="superhero")
window.title("Password Manager")
window.config(padx=50, pady=50)
# logo
canvas = tk.Canvas(width=200, height=200, highlightthickness=0)
logo_img = tk.PhotoImage(file="logo.png")
canvas.create_image(100, 100, image=logo_img)
canvas.grid(row=0, column=1, pady=(0, 20))
# labels
website_label = ttk.Label(text="Website:", font=(FONT_NAME, 12))
website_label.grid(row=1, column=0, sticky="W")
email_label = ttk.Label(text="Email/Username:", font=(FONT_NAME, 12))
email_label.grid(row=2, column=0, sticky="W")
password_label = ttk.Label(text="Password:", font=(FONT_NAME, 12))
password_label.grid(row=3, column=0, sticky="W")
# entries
website_entry = ttk.Entry(width=32)
website_entry.grid(row=1, column=1, pady=5, sticky="EW")
website_entry.focus()
email_entry = ttk.Entry(width=50)
email_entry.grid(row=2, column=1, columnspan=2, pady=5, sticky="EW")
email_entry.insert(0, "example@email.com")
password_entry = ttk.Entry(width=32)
password_entry.grid(row=3, column=1, pady=5, sticky="EW")
# buttons
search_button = ttk.Button(text="Search", width=14, command=find_password, style="info.TButton")
search_button.grid(row=1, column=2, sticky="EW", padx=(5,0))
generate_password_button = ttk.Button(text="Generate Password", command=generate_password, style="success.TButton")
generate_password_button.grid(row=3, column=2, sticky="EW", padx=(5,0))
add_button = ttk.Button(text="Add", width=43, command=save, style="primary.TButton")
add_button.grid(row=4, column=1, columnspan=2, pady=(10,0), sticky="EW")
view_all_button = ttk.Button(text="View All Passwords", command=view_all_passwords, style="secondary.TButton")
view_all_button.grid(row=5, column=1, columnspan=2, pady=(10,0), sticky="EW")
window.mainloop()
| {
"repo_id": "geekcomputers/Python",
"file_path": "Password Manager Using Tkinter/main.py",
"license": "MIT License",
"lines": 168,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
geekcomputers/Python:smart_file_organizer.py | #!/usr/bin/env python3
"""
Smart File Organizer
A utility script to organize files in a specified directory into categorized
subfolders based on file types.
Example categories include: Images, Documents, Videos, Audios, Archives, Scripts, Others.
Usage:
python smart_file_organizer.py --path "C:\\Users\\YourName\\Downloads" --interval 0
Arguments:
--path Directory path to organize.
--interval Interval in minutes to repeat automatically (0 = run once).
Author:
Sangam Paudel
"""
import os
import shutil
import argparse
import time
from datetime import datetime
# Mapping of category folder name -> file extensions that belong in it.
# Extensions are compared case-insensitively (see get_category); any
# extension not listed here is filed under "Others".
FILE_CATEGORIES = {
    "Images": [".jpg", ".jpeg", ".png", ".gif", ".bmp", ".tiff", ".svg"],
    "Documents": [".pdf", ".doc", ".docx", ".txt", ".ppt", ".pptx", ".xls", ".xlsx"],
    "Videos": [".mp4", ".mkv", ".mov", ".avi", ".flv", ".wmv"],
    "Audios": [".mp3", ".wav", ".aac", ".flac", ".ogg"],
    "Archives": [".zip", ".rar", ".tar", ".gz", ".7z"],
    "Scripts": [".py", ".js", ".sh", ".bat", ".java", ".cpp", ".c"],
}
def create_folder(folder_path: str) -> None:
    """
    Create a folder (including parents) if it does not already exist.

    Uses ``os.makedirs(..., exist_ok=True)`` so the check-and-create is a
    single call, avoiding the race between ``os.path.exists`` and
    ``os.makedirs`` when another process creates the folder in between.

    Args:
        folder_path: Path of the folder to create.
    """
    os.makedirs(folder_path, exist_ok=True)
def get_category(file_ext: str) -> str:
    """
    Map a file extension to its category name.

    Args:
        file_ext: File extension including the dot (e.g. ".txt").

    Returns:
        Matching category from ``FILE_CATEGORIES``, or "Others" when the
        extension is not listed.
    """
    normalized = file_ext.lower()
    return next(
        (name for name, exts in FILE_CATEGORIES.items() if normalized in exts),
        "Others",
    )
def organize_files(base_path: str) -> None:
    """
    Organize files in the given directory into subfolders by category.

    Files are moved into ``<base_path>/<Category>/``. If a file with the
    same name already exists in the target folder, a numeric suffix is
    appended (e.g. ``report (1).pdf``) instead of silently replacing it —
    ``shutil.move`` overwrites an existing destination file on POSIX and
    raises on Windows, so collisions must be resolved up front.

    Args:
        base_path: Path of the directory to organize.
    """
    files = [
        f for f in os.listdir(base_path) if os.path.isfile(os.path.join(base_path, f))
    ]
    if not files:
        print(f"[{datetime.now().strftime('%H:%M:%S')}] No files found in {base_path}")
        return
    for file_name in files:
        source = os.path.join(base_path, file_name)
        file_ext = os.path.splitext(file_name)[1]
        category = get_category(file_ext)
        target_folder = os.path.join(base_path, category)
        create_folder(target_folder)
        destination = _unique_destination(target_folder, file_name)
        try:
            shutil.move(source, destination)
            print(
                f"[{datetime.now().strftime('%H:%M:%S')}] Moved: {file_name} -> {category}/"
            )
        except Exception as e:
            print(
                f"[{datetime.now().strftime('%H:%M:%S')}] Error moving {file_name}: {e}"
            )


def _unique_destination(target_folder: str, file_name: str) -> str:
    """Return a path inside target_folder that does not collide with an existing file."""
    candidate = os.path.join(target_folder, file_name)
    if not os.path.exists(candidate):
        return candidate
    stem, ext = os.path.splitext(file_name)
    counter = 1
    # Append " (1)", " (2)", ... until a free name is found.
    while True:
        candidate = os.path.join(target_folder, f"{stem} ({counter}){ext}")
        if not os.path.exists(candidate):
            return candidate
        counter += 1
def main() -> None:
    """Parse command-line arguments and execute the file organizer."""
    parser = argparse.ArgumentParser(
        description="Organize files in a directory into categorized subfolders."
    )
    parser.add_argument("--path", required=True, help="Directory path to organize.")
    parser.add_argument(
        "--interval",
        type=int,
        default=0,
        help="Interval (in minutes) to repeat automatically (0 = run once).",
    )
    args = parser.parse_args()
    # Bail out early if the target directory does not exist.
    if not os.path.exists(args.path):
        print(f"Path not found: {args.path}")
        return
    print(f"Watching directory: {args.path}")
    print("Organizer started. Press Ctrl+C to stop.\n")
    try:
        # Run once when interval is 0; otherwise loop until Ctrl+C.
        while True:
            organize_files(args.path)
            if args.interval == 0:
                break
            print(f"Waiting {args.interval} minutes before next run...\n")
            time.sleep(args.interval * 60)
    except KeyboardInterrupt:
        print("\nOrganizer stopped by user.")
if __name__ == "__main__":
    main()
| {
"repo_id": "geekcomputers/Python",
"file_path": "smart_file_organizer.py",
"license": "MIT License",
"lines": 103,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
geekcomputers/Python:NumPy Array Exponentiation.py | """
NumPy Array Exponentiation
Check if two arrays have the same shape and compute element-wise powers
with and without np.power.
Example usage:
>>> import numpy as np
>>> x = np.array([1, 2])
>>> y = np.array([3, 4])
>>> get_array(x, y) # doctest: +ELLIPSIS
Array of powers without using np.power: [ 1 16]
Array of powers using np.power: [ 1 16]
"""
import numpy as np
def get_array(x: np.ndarray, y: np.ndarray) -> None:
    """
    Print element-wise powers of two equally-shaped NumPy arrays.

    The result is computed twice — once with the ``**`` operator and once
    with ``np.power`` — and both are printed. If the shapes differ, an
    error message is printed instead.

    Parameters
    ----------
    x : np.ndarray
        Base array.
    y : np.ndarray
        Exponent array.

    Returns
    -------
    None
        Output goes to stdout.
    """
    if x.shape != y.shape:
        print("Error: Shape of the given arrays is not equal.")
        return
    powered = x**y
    print("Array of powers without using np.power: ", powered)
    print("Array of powers using np.power: ", np.power(x, y))
if __name__ == "__main__":
    import doctest
    doctest.testmod()
    # Demo: exercise get_array with arrays of increasing rank.
    # 0D array
    np_arr1 = np.array(3)
    np_arr2 = np.array(4)
    # 1D array
    np_arr3 = np.array([1, 2])
    np_arr4 = np.array([3, 4])
    # 2D array
    np_arr5 = np.array([[1, 2], [3, 4]])
    np_arr6 = np.array([[5, 6], [7, 8]])
    get_array(np_arr1, np_arr2)
    print()
    get_array(np_arr3, np_arr4)
    print()
    get_array(np_arr5, np_arr6)
| {
"repo_id": "geekcomputers/Python",
"file_path": "NumPy Array Exponentiation.py",
"license": "MIT License",
"lines": 59,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | documentation |
geekcomputers/Python:Sanke-water-gun game.py | # author: slayking1965 (refactored for Python 3.13.7 with typing & doctests)
"""
Snake-Water-Gun Game.
Rules:
- Snake vs Water → Snake drinks water → Snake (computer) wins
- Gun vs Water → Gun sinks in water → Water (user) wins
- Snake vs Gun → Gun kills snake → Gun wins
- Same choice → Draw
This module implements a 10-round Snake-Water-Gun game where a user plays
against the computer.
Functions
---------
determine_winner(user: str, computer: str) -> str
Returns result: "user", "computer", or "draw".
Examples
--------
>>> determine_winner("s", "w")
'computer'
>>> determine_winner("w", "g")
'user'
>>> determine_winner("s", "s")
'draw'
"""
import random
import time
from typing import Dict
# Single-letter choice codes mapped to their display names.
CHOICES: Dict[str, str] = {"s": "Snake", "w": "Water", "g": "Gun"}
def determine_winner(user: str, computer: str) -> str:
    """
    Decide the winner of one round.

    NOTE(review): the snake/water pairing looks inverted relative to the
    rules in the module docstring (user "s" vs computer "w" is scored for
    the computer), but this matches the module's doctests, so the mapping
    is preserved exactly — confirm intent before changing it.

    Parameters
    ----------
    user : str
        User's choice ("s", "w", "g").
    computer : str
        Computer's choice ("s", "w", "g").

    Returns
    -------
    str
        "user", "computer", "draw", or "invalid".
    """
    if user == computer:
        return "draw"
    # (user, computer) -> round result, exactly as the original chain decided.
    outcomes = {
        ("s", "w"): "computer",
        ("w", "s"): "user",
        ("g", "s"): "user",
        ("s", "g"): "computer",
        ("w", "g"): "user",
        ("g", "w"): "computer",
    }
    return outcomes.get((user, computer), "invalid")
def play_game(rounds: int = 10) -> None:
    """
    Play Snake-Water-Gun game for given rounds.

    Any input other than "s"/"w"/"g" restarts the whole series: the round
    counter and all scores are reset to zero (see the `else` branch).

    Parameters
    ----------
    rounds : int
        Number of rounds to play (default 10).
    """
    print("Welcome to the Snake-Water-Gun Game\n")
    print(f"I am Mr. Computer, We will play this game {rounds} times")
    print("Whoever wins more matches will be the winner\n")
    user_win = 0
    comp_win = 0
    draw = 0
    round_no = 0
    while round_no < rounds:
        print(f"Game No. {round_no + 1}")
        for key, val in CHOICES.items():
            print(f"Choose {key.upper()} for {val}")
        comp_choice = random.choice(list(CHOICES.keys()))
        user_choice = input("\n-----> ").strip().lower()
        result = determine_winner(user_choice, comp_choice)
        if result == "user":
            user_win += 1
        elif result == "computer":
            comp_win += 1
        elif result == "draw":
            draw += 1
        else:
            # Invalid input restarts the entire series, not just this round.
            print("\nInvalid input, restarting the game...\n")
            time.sleep(1)
            round_no = 0
            user_win = comp_win = draw = 0
            continue
        round_no += 1
        print(f"Computer chose {CHOICES[comp_choice]}")
        print(f"You chose {CHOICES.get(user_choice, 'Invalid')}\n")
    print("\nHere are final stats:")
    print(f"Mr. Computer won: {comp_win} matches")
    print(f"You won: {user_win} matches")
    print(f"Matches Drawn: {draw}")
    # The series is decided by total wins; equal wins is a draw.
    if comp_win > user_win:
        print("\n------- Mr. Computer won -------")
    elif comp_win < user_win:
        print("\n----------- You won -----------")
    else:
        print("\n---------- Match Draw ----------")
if __name__ == "__main__":
    import doctest
    doctest.testmod()
    play_game()
| {
"repo_id": "geekcomputers/Python",
"file_path": "Sanke-water-gun game.py",
"license": "MIT License",
"lines": 107,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
geekcomputers/Python:Tic-Tac-Toe Games/tic-tac-toe1.py | """
Text-based Tic-Tac-Toe (2 players).
>>> check_winner([['X','X','X'],[' ',' ',' '],[' ',' ',' ']], 'X')
True
>>> check_winner([['X','O','X'],['O','O','O'],['X',' ',' ']], 'O')
True
>>> check_winner([['X','O','X'],['O','X','O'],['O','X','O']], 'X')
False
>>> is_full([['X','O','X'],['O','X','O'],['O','X','O']])
True
>>> is_full([['X',' ','X'],['O','X','O'],['O','X','O']])
False
"""
from typing import List
# Type alias: 3x3 grid of "X" / "O" / " " cell strings.
Board = List[List[str]]
def print_board(board: Board) -> None:
    """Print the Tic-Tac-Toe board, one row per line with a rule under each."""
    rendered = []
    for cells in board:
        rendered.append(" | ".join(cells))
        rendered.append("-" * 9)
    print("\n".join(rendered))
def check_winner(board: Board, player: str) -> bool:
    """Return True if `player` occupies a full row, column, or diagonal."""
    lines = []
    for k in range(3):
        lines.append([board[k][j] for j in range(3)])  # row k
        lines.append([board[j][k] for j in range(3)])  # column k
    lines.append([board[k][k] for k in range(3)])      # main diagonal
    lines.append([board[k][2 - k] for k in range(3)])  # anti-diagonal
    return any(all(cell == player for cell in line) for line in lines)
def is_full(board: Board) -> bool:
    """Return True when no empty cell (" ") remains on the board."""
    return not any(cell == " " for row in board for cell in row)
def get_valid_input(prompt: str) -> int:
    """Prompt repeatedly until the user enters an integer in [0, 2]."""
    while True:
        raw = input(prompt)
        try:
            value = int(raw)
        except ValueError:
            print("Invalid input: Please enter an integer.")
            continue
        if 0 <= value < 3:
            return value
        print("Invalid input: Enter a number between 0 and 2.")
def main() -> None:
    """Run the text-based Tic-Tac-Toe game."""
    board: Board = [[" " for _ in range(3)] for _ in range(3)]
    player = "X"
    while True:
        print_board(board)
        print(f"Player {player}'s turn:")
        row = get_valid_input("Enter row (0-2): ")
        col = get_valid_input("Enter col (0-2): ")
        if board[row][col] == " ":
            board[row][col] = player
            # Win is checked before draw so a winning final move is not
            # misreported as a tie on a full board.
            if check_winner(board, player):
                print_board(board)
                print(f"Player {player} wins!")
                break
            if is_full(board):
                print_board(board)
                print("It's a draw!")
                break
            # Alternate turns only after a successful move.
            player = "O" if player == "X" else "X"
        else:
            print("Invalid move: Spot taken. Try again.")
if __name__ == "__main__":
    import doctest
    doctest.testmod()
    main()
| {
"repo_id": "geekcomputers/Python",
"file_path": "Tic-Tac-Toe Games/tic-tac-toe1.py",
"license": "MIT License",
"lines": 71,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
geekcomputers/Python:Tic-Tac-Toe Games/tic-tac-toe2.py | """
Tic-Tac-Toe Console Game
Two players (X and O) take turns to mark a 3x3 grid until one wins
or the game ends in a draw.
Doctest Examples:
>>> test_board = [" "] * 10
>>> check_position(test_board, 1)
True
>>> test_board[1] = "X"
>>> check_position(test_board, 1)
False
"""
import os
import time
from typing import List
# Global Variables
board: List[str] = [" "] * 10  # 1-based indexing
player: int = 1  # move counter; odd -> X's turn, even -> O's turn (see main)
# Status codes stored in `Game` by check_win().
Win: int = 1
Draw: int = -1
Running: int = 0
Game: int = Running
def draw_board() -> None:
    """Print the current state of the Tic-Tac-Toe board."""
    rows = ((1, 2, 3), (4, 5, 6), (7, 8, 9))
    for idx, (a, b, c) in enumerate(rows):
        print(f" {board[a]} | {board[b]} | {board[c]}")
        # Horizontal rule between rows; open cells under the last row.
        print("___|___|___" if idx < 2 else " | | ")
def check_position(b: List[str], pos: int) -> bool:
    """
    Report whether a board cell is unoccupied.

    Args:
        b (List[str]): Board (1-based; index 0 unused).
        pos (int): Position 1-9.

    Returns:
        bool: True when the cell still holds a space, False otherwise.

    >>> cells = [" "] * 10
    >>> check_position(cells, 1)
    True
    >>> cells[1] = "X"
    >>> check_position(cells, 1)
    False
    """
    cell = b[pos]
    return cell == " "
def check_win() -> None:
    """Evaluate the board and update the global Game status."""
    global Game
    # All 8 winning lines: three rows, three columns, two diagonals.
    winning_lines = (
        (1, 2, 3),
        (4, 5, 6),
        (7, 8, 9),
        (1, 4, 7),
        (2, 5, 8),
        (3, 6, 9),
        (1, 5, 9),
        (3, 5, 7),
    )
    for a, b, c in winning_lines:
        if board[a] != " " and board[a] == board[b] == board[c]:
            Game = Win
            return
    filled = all(board[i] != " " for i in range(1, 10))
    Game = Draw if filled else Running
def main() -> None:
    """Run the Tic-Tac-Toe game in the console."""
    global player
    print("Tic-Tac-Toe Game Designed By Sourabh Somani")
    print("Player 1 [X] --- Player 2 [O]\n\nPlease Wait...")
    time.sleep(2)
    while Game == Running:
        os.system("cls" if os.name == "nt" else "clear")
        draw_board()
        # Odd move number -> Player 1 (X); even -> Player 2 (O).
        mark = "X" if player % 2 != 0 else "O"
        print(f"Player {1 if mark == 'X' else 2}'s chance")
        try:
            choice = int(input("Enter position [1-9] to mark: "))
        except ValueError:
            print("Invalid input! Enter an integer between 1-9.")
            time.sleep(2)
            continue
        if choice < 1 or choice > 9:
            print("Invalid position! Choose between 1-9.")
            time.sleep(2)
            continue
        if check_position(board, choice):
            board[choice] = mark
            player += 1
            # check_win() updates the global Game flag that ends this loop.
            check_win()
        else:
            print("Position already taken! Try another.")
            time.sleep(2)
    os.system("cls" if os.name == "nt" else "clear")
    draw_board()
    if Game == Draw:
        print("Game Draw")
    elif Game == Win:
        # `player` was incremented past the winning move, so step back one.
        player_won = 1 if (player - 1) % 2 != 0 else 2
        print(f"Player {player_won} Won!")
if __name__ == "__main__":
    import doctest
    doctest.testmod()
    main()
| {
"repo_id": "geekcomputers/Python",
"file_path": "Tic-Tac-Toe Games/tic-tac-toe2.py",
"license": "MIT License",
"lines": 107,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
geekcomputers/Python:Tic-Tac-Toe Games/tic-tac-toe3.py | """
Tic-Tac-Toe with AI (Minimax) using CustomTkinter.
Player = "X", AI = "O". Click a button to play.
>>> check_winner([['X','X','X'],[' ',' ',' '],[' ',' ',' ']], 'X')
True
>>> check_winner([['X','O','X'],['O','O','O'],['X',' ',' ']], 'O')
True
>>> check_winner([['X','O','X'],['O','X','O'],['O','X','O']], 'X')
False
"""
from typing import List, Optional, Tuple
import customtkinter as ctk
from tkinter import messagebox
# Type alias: 3x3 grid of "X" / "O" / " " cell strings.
Board = List[List[str]]
def check_winner(board: Board, player: str) -> bool:
    """Check if `player` has a winning line on `board`."""
    triples = [[board[i][j] for j in range(3)] for i in range(3)]       # rows
    triples += [[board[j][i] for j in range(3)] for i in range(3)]      # columns
    triples.append([board[i][i] for i in range(3)])                     # main diagonal
    triples.append([board[i][2 - i] for i in range(3)])                 # anti-diagonal
    return any(all(c == player for c in t) for t in triples)
def is_board_full(board: Board) -> bool:
    """Return True if all cells are filled (no " " left)."""
    flattened = [cell for row in board for cell in row]
    return " " not in flattened
def minimax(board: Board, depth: int, is_max: bool) -> int:
    """Minimax evaluation: +1 if O can force a win, -1 for X, 0 for a draw."""
    if check_winner(board, "X"):
        return -1
    if check_winner(board, "O"):
        return 1
    if is_board_full(board):
        return 0
    # Maximizing level plays "O" and keeps the max; minimizing plays "X".
    mark = "O" if is_max else "X"
    pick = max if is_max else min
    best = float("-inf") if is_max else float("inf")
    for r in range(3):
        for c in range(3):
            if board[r][c] == " ":
                board[r][c] = mark
                best = pick(best, minimax(board, depth + 1, not is_max))
                board[r][c] = " "  # undo the trial move
    return best
def best_move(board: Board) -> Optional[Tuple[int, int]]:
    """Return the (row, col) with the highest minimax value for O, or None."""
    move: Optional[Tuple[int, int]] = None
    best_val = float("-inf")
    empty_cells = [(r, c) for r in range(3) for c in range(3) if board[r][c] == " "]
    for r, c in empty_cells:
        board[r][c] = "O"
        score = minimax(board, 0, False)
        board[r][c] = " "
        # Strict > keeps the first-best cell in row-major order, as before.
        if score > best_val:
            best_val = score
            move = (r, c)
    return move
def make_move(row: int, col: int) -> None:
    """Handle a human click: place X, check for game end, then let the AI reply."""
    # Reject clicks on occupied cells.
    if board[row][col] != " ":
        messagebox.showerror("Error", "Invalid move")
        return
    board[row][col] = "X"
    buttons[row][col].configure(text="X")
    if check_winner(board, "X"):
        messagebox.showinfo("Tic-Tac-Toe", "You win!")
        root.quit()
    elif is_board_full(board):
        messagebox.showinfo("Tic-Tac-Toe", "Draw!")
        root.quit()
    else:
        # Game continues: the computer answers immediately.
        ai_move()
def ai_move() -> None:
    """Let the AI place an O on its best cell and check for game end."""
    move = best_move(board)
    # No move available (board already full) — nothing to do.
    if move is None:
        return
    r, c = move
    board[r][c] = "O"
    buttons[r][c].configure(text="O")
    if check_winner(board, "O"):
        messagebox.showinfo("Tic-Tac-Toe", "AI wins!")
        root.quit()
    elif is_board_full(board):
        messagebox.showinfo("Tic-Tac-Toe", "Draw!")
        root.quit()
# --- Initialize GUI ---
root = ctk.CTk()
root.title("Tic-Tac-Toe")
# Shared game state: 3x3 cell grid and the matching button widgets.
board: Board = [[" "] * 3 for _ in range(3)]
buttons: List[List[ctk.CTkButton]] = []
for i in range(3):
    row_buttons: List[ctk.CTkButton] = []
    for j in range(3):
        btn = ctk.CTkButton(
            root,
            text=" ",
            font=("normal", 30),
            width=100,
            height=100,
            # Default args bind the current i, j (avoids the late-binding
            # closure pitfall in loops).
            command=lambda r=i, c=j: make_move(r, c),
        )
        btn.grid(row=i, column=j, padx=2, pady=2)
        row_buttons.append(btn)
    buttons.append(row_buttons)
if __name__ == "__main__":
    import doctest
    doctest.testmod()
    root.mainloop()
| {
"repo_id": "geekcomputers/Python",
"file_path": "Tic-Tac-Toe Games/tic-tac-toe3.py",
"license": "MIT License",
"lines": 121,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
geekcomputers/Python:Tic-Tac-Toe Games/tic-tac-toe4.py | """
Tic-Tac-Toe Game using NumPy and random moves.
Two players (1 and 2) randomly take turns until one wins or board is full.
Doctests:
>>> b = create_board()
>>> all(b.flatten() == 0)
True
>>> len(possibilities(b))
9
>>> row_win(np.array([[1,1,1],[0,0,0],[0,0,0]]), 1)
True
>>> col_win(np.array([[2,0,0],[2,0,0],[2,0,0]]), 2)
True
>>> diag_win(np.array([[1,0,0],[0,1,0],[0,0,1]]), 1)
True
>>> evaluate(np.array([[1,1,1],[0,0,0],[0,0,0]]))
1
>>> evaluate(np.array([[1,2,1],[2,1,2],[2,1,2]]))
-1
"""
import numpy as np
import random
from time import sleep
from typing import List, Tuple
def create_board() -> np.ndarray:
    """Return an empty 3x3 Tic-Tac-Toe board filled with zeros."""
    return np.full((3, 3), 0, dtype=int)
def possibilities(board: np.ndarray) -> List[Tuple[int, int]]:
    """Return the (row, col) pairs of every empty (zero) cell, row-major."""
    coords: List[Tuple[int, int]] = []
    for row in range(3):
        for col in range(3):
            if board[row, col] == 0:
                coords.append((row, col))
    return coords
def random_place(board: np.ndarray, player: int) -> np.ndarray:
    """Mark a uniformly random empty cell with `player` and return the board."""
    open_cells = possibilities(board)
    spot = random.choice(open_cells)
    board[spot] = player
    return board
def row_win(board: np.ndarray, player: int) -> bool:
    """Check if player has a complete row."""
    return bool((board == player).all(axis=1).any())
def col_win(board: np.ndarray, player: int) -> bool:
    """Check if player has a complete column."""
    return bool((board == player).all(axis=0).any())
def diag_win(board: np.ndarray, player: int) -> bool:
    """Check if player owns the full main diagonal or anti-diagonal."""
    main_diag = np.diag(board)
    anti_diag = np.diag(np.fliplr(board))
    return bool((main_diag == player).all() or (anti_diag == player).all())
def evaluate(board: np.ndarray) -> int:
    """
    Evaluate the board.

    Returns:
        0 if no winner yet,
        1 or 2 for the winner,
        -1 if tie.
    """
    for contender in (1, 2):
        won = (
            row_win(board, contender)
            or col_win(board, contender)
            or diag_win(board, contender)
        )
        if won:
            return contender
    # Full board without a winner is a tie; otherwise the game continues.
    return -1 if np.all(board != 0) else 0
def play_game() -> int:
    """Play a full random Tic-Tac-Toe game and return the winner (or -1 for a tie)."""
    board, winner, counter = create_board(), 0, 1
    print("Initial board:\n", board)
    sleep(1)
    while winner == 0:
        # Players 1 and 2 alternate random moves until evaluate() reports
        # a winner (1/2) or a tie (-1).
        for player in [1, 2]:
            board = random_place(board, player)
            print(f"\nBoard after move {counter} by Player {player}:\n{board}")
            sleep(1)
            counter += 1
            winner = evaluate(board)
            if winner != 0:
                break
    return winner
if __name__ == "__main__":
    import doctest
    doctest.testmod()
    winner = play_game()
    if winner == -1:
        print("\nThe game is a tie!")
    else:
        print(f"\nWinner is: Player {winner}")
| {
"repo_id": "geekcomputers/Python",
"file_path": "Tic-Tac-Toe Games/tic-tac-toe4.py",
"license": "MIT License",
"lines": 86,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
geekcomputers/Python:Tic-Tac-Toe Games/tic-tac-toe5.py | """
Tic-Tac-Toe Game with Full Type Hints and Doctests.
Two-player game where Player and Computer take turns.
Player chooses X or O and Computer takes the opposite.
Doctests examples:
>>> is_winner([' ', 'X','X','X',' ',' ',' ',' ',' ',' '], 'X')
True
>>> is_space_free([' ', 'X',' ',' ',' ',' ',' ',' ',' ',' '], 1)
False
>>> is_space_free([' ']*10, 5)
True
>>> choose_random_move_from_list([' ']*10, [1,2,3]) in [1,2,3]
True
"""
import random
from typing import List, Optional, Tuple
def introduction() -> None:
    """Print game introduction."""
    for line in (
        "Welcome to Tic Tac Toe!",
        "Player is X, Computer is O.",
        "Board positions 1-9 (bottom-left to top-right).",
    ):
        print(line)
def draw_board(board: List[str]) -> None:
    """Display the current board (positions 1-9, bottom-left origin)."""

    def show_row(a: int, b: int, c: int) -> None:
        # Each visual row is padded with an empty cell line above and below.
        print(" | |")
        print(f" {board[a]} | {board[b]} | {board[c]}")
        print(" | |")

    show_row(7, 8, 9)
    print("-------------")
    show_row(4, 5, 6)
    print("-------------")
    show_row(1, 2, 3)
def input_player_letter() -> Tuple[str, str]:
    """
    Let player choose X or O.

    Returns tuple (player_letter, computer_letter).
    """
    choice: str = ""
    while choice not in ("X", "O"):
        print("Do you want to be X or O? ")
        choice = input("> ").upper()
    if choice == "X":
        return ("X", "O")
    return ("O", "X")
def first_player() -> str:
    """Randomly decide who goes first (coin toss)."""
    toss = random.randint(0, 1)
    return "Computer" if toss == 0 else "Player"
def play_again() -> bool:
    """Ask the player if they want to play again; any answer starting with 'y' is yes."""
    print("Do you want to play again? (y/n)")
    answer = input().lower()
    return answer.startswith("y")
def make_move(board: List[str], letter: str, move: int) -> None:
    """Write `letter` into `board` at index `move` (mutates the list in place)."""
    board[move] = letter
def is_winner(board: List[str], le: str) -> bool:
    """
    Return True if the given letter holds a full row, column, or diagonal.

    >>> is_winner([' ', 'X','X','X',' ',' ',' ',' ',' ',' '], 'X')
    True
    >>> is_winner([' ']*10, 'O')
    False
    """
    wins = (
        (7, 8, 9), (4, 5, 6), (1, 2, 3),  # rows (top, middle, bottom)
        (7, 4, 1), (8, 5, 2), (9, 6, 3),  # columns
        (7, 5, 3), (9, 5, 1),             # diagonals
    )
    return any(board[a] == board[b] == board[c] == le for a, b, c in wins)
def get_board_copy(board: List[str]) -> List[str]:
    """Return a shallow copy of the board."""
    return list(board)
def is_space_free(board: List[str], move: int) -> bool:
    """
    Return True if a position on the board is free.

    >>> is_space_free([' ', 'X',' ',' ',' ',' ',' ',' ',' ',' '], 1)
    False
    >>> is_space_free([' ']*10, 5)
    True
    """
    return " " == board[move]
def get_player_move(board: List[str]) -> int:
    """Prompt until the player names a free square with a digit 1-9."""
    valid_tokens = "1 2 3 4 5 6 7 8 9".split()
    move: str = " "
    while move not in valid_tokens or not is_space_free(board, int(move)):
        print("What is your next move? (1-9)")
        move = input()
    return int(move)
def choose_random_move_from_list(
    board: List[str], moves_list: List[int]
) -> Optional[int]:
    """Return a random free move from `moves_list`, or None when none is free."""
    open_moves = [m for m in moves_list if is_space_free(board, m)]
    if not open_moves:
        return None
    return random.choice(open_moves)
def get_computer_move(board: List[str], computer_letter: str) -> int:
    """Pick the computer's move: win, block, corner, center, then side."""
    player_letter = "O" if computer_letter == "X" else "X"
    # 1) Take an immediate winning square; 2) otherwise block the player's win.
    for letter in (computer_letter, player_letter):
        for spot in range(1, 10):
            trial = get_board_copy(board)
            if is_space_free(trial, spot):
                make_move(trial, letter, spot)
                if is_winner(trial, letter):
                    return spot
    # 3) Prefer a corner.
    corner = choose_random_move_from_list(board, [1, 3, 7, 9])
    if corner is not None:
        return corner
    # 4) Take the center when free.
    if is_space_free(board, 5):
        return 5
    # 5) Fall back to a side square.
    return choose_random_move_from_list(board, [2, 4, 6, 8])  # type: ignore
def is_board_full(board: List[str]) -> bool:
    """Return True if the board has no free spaces."""
    return not any(is_space_free(board, i) for i in range(1, 10))
def main() -> None:
    """Main game loop: alternate player/computer turns until someone quits."""
    introduction()
    while True:
        # Index 0 is unused; positions are 1-9.
        the_board: List[str] = [" "] * 10
        player_letter, computer_letter = input_player_letter()
        turn = first_player()
        print(f"{turn} goes first.")
        game_is_playing = True
        while game_is_playing:
            if turn.lower() == "player":
                draw_board(the_board)
                move = get_player_move(the_board)
                make_move(the_board, player_letter, move)
                if is_winner(the_board, player_letter):
                    draw_board(the_board)
                    print("Hooray! You have won the game!")
                    game_is_playing = False
                elif is_board_full(the_board):
                    draw_board(the_board)
                    print("The game is a tie!")
                    break
                else:
                    turn = "computer"
            else:
                move = get_computer_move(the_board, computer_letter)
                make_move(the_board, computer_letter, move)
                if is_winner(the_board, computer_letter):
                    draw_board(the_board)
                    print("Computer has won. You Lose.")
                    game_is_playing = False
                elif is_board_full(the_board):
                    draw_board(the_board)
                    print("The game is a tie!")
                    break
                else:
                    turn = "player"
        if not play_again():
            break
if __name__ == "__main__":
    import doctest
    doctest.testmod()
    main()
| {
"repo_id": "geekcomputers/Python",
"file_path": "Tic-Tac-Toe Games/tic-tac-toe5.py",
"license": "MIT License",
"lines": 176,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
geekcomputers/Python:Tic-Tac-Toe Games/tic-tac-toe6.py | """
Tic-Tac-Toe Series Game
Two players can play multiple rounds of Tic-Tac-Toe.
Keeps score across rounds until players quit.
Doctest examples:
>>> check_win({"X": [1, 2, 3], "O": []}, "X")
True
>>> check_win({"X": [1, 2], "O": []}, "X")
False
>>> check_draw({"X": [1, 2, 3], "O": [4, 5, 6]})
False
>>> check_draw({"X": [1, 2, 3, 4, 5], "O": [6, 7, 8, 9]})
True
"""
from typing import List, Dict
def print_tic_tac_toe(values: List[str]) -> None:
    """Print the current 3x3 board from a flat list of nine cell marks."""
    print("\n")
    for start in (0, 3, 6):
        print("\t | |")
        print("\t {} | {} | {}".format(values[start], values[start + 1], values[start + 2]))
        # Rule between rows; open cell line after the last row.
        print("\t_____|_____|_____" if start < 6 else "\t | |")
    print("\n")
def print_scoreboard(score_board: Dict[str, int]) -> None:
    """Print the current score-board for the two players."""
    rule = "\t--------------------------------"
    print(rule)
    print("\t SCOREBOARD ")
    print(rule)
    players = list(score_board)
    print(f"\t {players[0]} \t {score_board[players[0]]}")
    print(f"\t {players[1]} \t {score_board[players[1]]}")
    print(rule + "\n")
def check_win(player_pos: Dict[str, List[int]], cur_player: str) -> bool:
    """
    Check if the current player has completed any winning line.

    Args:
        player_pos: Dict of positions held by each mark ("X" and "O").
        cur_player: Current player ("X" or "O").

    Returns:
        True if the player wins, False otherwise.

    >>> check_win({"X": [1,2,3], "O": []}, "X")
    True
    >>> check_win({"X": [1,2], "O": []}, "X")
    False
    """
    held = set(player_pos[cur_player])
    winning_lines = (
        {1, 2, 3}, {4, 5, 6}, {7, 8, 9},  # rows
        {1, 4, 7}, {2, 5, 8}, {3, 6, 9},  # columns
        {1, 5, 9}, {3, 5, 7},             # diagonals
    )
    return any(line <= held for line in winning_lines)
def check_draw(player_pos: Dict[str, List[int]]) -> bool:
    """
    Check if the game is drawn (all nine positions filled).

    Args:
        player_pos: Dict of positions held by "X" and "O".

    Returns:
        True if the board is completely filled, False otherwise.

    >>> check_draw({"X": [1,2,3], "O": [4,5,6]})
    False
    >>> check_draw({"X": [1,2,3,4,5], "O": [6,7,8,9]})
    True
    """
    total_marks = sum(len(player_pos[mark]) for mark in ("X", "O"))
    return total_marks == 9
def single_game(cur_player: str) -> str:
    """Play one round of Tic-Tac-Toe; return the winning mark, or "D" for a draw."""
    values: List[str] = [" "] * 9
    player_pos: Dict[str, List[int]] = {"X": [], "O": []}
    while True:
        print_tic_tac_toe(values)
        raw = input(f"Player {cur_player} turn. Which box? : ")
        try:
            move = int(raw)
        except ValueError:
            print("Wrong Input!!! Try Again")
            continue
        if not 1 <= move <= 9:
            print("Wrong Input!!! Try Again")
            continue
        if values[move - 1] != " ":
            print("Place already filled. Try again!!")
            continue
        # Record the move on the board and in the per-player position list.
        values[move - 1] = cur_player
        player_pos[cur_player].append(move)
        if check_win(player_pos, cur_player):
            print_tic_tac_toe(values)
            print(f"Player {cur_player} has won the game!!\n")
            return cur_player
        if check_draw(player_pos):
            print_tic_tac_toe(values)
            print("Game Drawn\n")
            return "D"
        cur_player = "O" if cur_player == "X" else "X"
def main() -> None:
    """Run a series of Tic-Tac-Toe games, tracking wins per player name."""
    player1 = input("Player 1, Enter the name: ")
    player2 = input("Player 2, Enter the name: ")
    cur_player = player1
    # Maps mark ("X"/"O") -> player name for the round being played.
    player_choice: Dict[str, str] = {"X": "", "O": ""}
    options: List[str] = ["X", "O"]
    score_board: Dict[str, int] = {player1: 0, player2: 0}
    print_scoreboard(score_board)
    while True:
        print(f"Turn to choose for {cur_player}")
        print("Enter 1 for X")
        print("Enter 2 for O")
        print("Enter 3 to Quit")
        try:
            choice = int(input())
        except ValueError:
            print("Wrong Input!!! Try Again\n")
            continue
        if choice == 1:
            player_choice["X"] = cur_player
            player_choice["O"] = player2 if cur_player == player1 else player1
        elif choice == 2:
            player_choice["O"] = cur_player
            player_choice["X"] = player2 if cur_player == player1 else player1
        elif choice == 3:
            print("Final Scores")
            print_scoreboard(score_board)
            break
        else:
            print("Wrong Choice!!!! Try Again\n")
            continue
        # single_game returns the winning mark ("X"/"O") or "D" for a draw.
        winner = single_game(options[choice - 1])
        if winner != "D":
            score_board[player_choice[winner]] += 1
        print_scoreboard(score_board)
        # Alternate who picks marks first in the next round.
        cur_player = player2 if cur_player == player1 else player1
if __name__ == "__main__":
    import doctest
    doctest.testmod()
    main()
| {
"repo_id": "geekcomputers/Python",
"file_path": "Tic-Tac-Toe Games/tic-tac-toe6.py",
"license": "MIT License",
"lines": 144,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
geekcomputers/Python:password guessing.py | # Author: Slayking1965
# Email: kingslayer8509@gmail.com
"""
Brute-force password guessing demonstration.
This script simulates guessing a password using random choices from
printable characters. It is a conceptual demonstration and is **not
intended for real-world password cracking**.
Example usage (simulated):
>>> import random
>>> random.seed(0)
>>> password = "abc"
>>> chars_list = list("abc")
>>> guess = random.choices(chars_list, k=len(password))
>>> guess # doctest: +ELLIPSIS
['a', 'c', 'b']...
"""
import random
import string
from typing import List
def guess_password_simulation(password: str) -> str:
    """
    Repeatedly draw random candidate strings until one matches `password`.

    Each attempt samples ``len(password)`` characters (with replacement)
    from ``string.printable`` and prints the candidate. Expected run time
    grows exponentially with the password length — this is a conceptual
    demonstration, not a practical attack.

    Parameters
    ----------
    password : str
        The password to guess.

    Returns
    -------
    str
        The correctly guessed password.
    """
    alphabet: List[str] = list(string.printable)
    target = list(password)
    attempts = 0
    guess: List[str] = []
    while guess != target:
        guess = random.choices(alphabet, k=len(target))
        attempts += 1
        print(f"<== Attempt {attempts}: {''.join(guess)} ==>")
    print("Password guessed successfully!")
    return "".join(guess)
if __name__ == "__main__":
    import doctest
    import pyautogui  # third-party; provides a masked password prompt dialog
    doctest.testmod()
    # Prompt user for password safely
    user_password: str = pyautogui.password("Enter a password: ")
    # pyautogui.password returns None when the dialog is cancelled; skip then.
    if user_password:
        guess_password_simulation(user_password)
| {
"repo_id": "geekcomputers/Python",
"file_path": "password guessing.py",
"license": "MIT License",
"lines": 52,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | documentation |
geekcomputers/Python:simple_calculator.py | """
Simple Calculator Module.
Provides basic operations: add, subtract, multiply, divide.
Example usage:
>>> add(2, 3)
5
>>> subtract(10, 4)
6
>>> multiply(3, 4)
12
>>> divide(8, 2)
4.0
"""
def add(x: float, y: float) -> float:
    """Return the sum of the two operands."""
    total = x + y
    return total
def subtract(x: float, y: float) -> float:
    """Return x minus y."""
    difference = x - y
    return difference
def multiply(x: float, y: float) -> float:
    """Return the product of the two operands."""
    product = x * y
    return product
def divide(x: float, y: float) -> float:
    """Return the quotient of x divided by y.

    Raises:
        ZeroDivisionError: If ``y`` is 0; callers must guard against it.
    """
    return x / y
def calculator() -> None:
    """Run a simple calculator in the console.

    Prompts for an operation (1-4) and two operands, prints the result, and
    exits. Invalid menu choices and non-numeric operands re-prompt instead of
    crashing; division by zero is reported as an error instead of raising.
    """
    print("Select operation.")
    print("1.Add\n2.Subtract\n3.Multiply\n4.Divide")
    while True:
        choice: str = input("Enter choice (1/2/3/4): ").strip()
        if choice in ("1", "2", "3", "4"):
            try:
                num1: float = float(input("Enter first number: "))
                num2: float = float(input("Enter second number: "))
            except ValueError:
                # Non-numeric operand previously crashed the program.
                print("Invalid number. Please enter numeric values.")
                continue
            if choice == "1":
                print(f"{num1} + {num2} = {add(num1, num2)}")
            elif choice == "2":
                print(f"{num1} - {num2} = {subtract(num1, num2)}")
            elif choice == "3":
                print(f"{num1} * {num2} = {multiply(num1, num2)}")
            elif choice == "4":
                try:
                    print(f"{num1} / {num2} = {divide(num1, num2)}")
                except ZeroDivisionError:
                    # divide() raises on y == 0; report instead of crashing.
                    print("Error: division by zero is not allowed.")
            break
        else:
            print("Invalid Input. Please select 1, 2, 3, or 4.")
if __name__ == "__main__":
    # Run the docstring examples, then start the interactive calculator.
    import doctest
    doctest.testmod()
    calculator()
| {
"repo_id": "geekcomputers/Python",
"file_path": "simple_calculator.py",
"license": "MIT License",
"lines": 49,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:to check leap year.py | """
Leap Year Checker.
Determine whether a given year is a leap year.
Doctests:
>>> is_leap_year(2000)
True
>>> is_leap_year(1900)
False
>>> is_leap_year(2024)
True
>>> is_leap_year(2023)
False
"""
def is_leap_year(year: int) -> bool:
    """
    Return True if year is a leap year, False otherwise.

    Gregorian rules: years divisible by 400 are leap years; other centuries
    (divisible by 100) are not; every remaining year is a leap year exactly
    when divisible by 4.
    """
    if year % 400 == 0:
        return True
    if year % 100 == 0:
        return False
    return year % 4 == 0
if __name__ == "__main__":
    import doctest
    doctest.testmod()
    # Interactive entry point: validate the input and report leap-year status.
    year_input = input("Enter a year: ").strip()
    try:
        year = int(year_input)
        if is_leap_year(year):
            print(f"{year} is a leap year")
        else:
            print(f"{year} is not a leap year")
    except ValueError:
        # int() raises ValueError for non-integer input such as "20x4".
        print("Invalid input! Please enter a valid integer year.")
| {
"repo_id": "geekcomputers/Python",
"file_path": "to check leap year.py",
"license": "MIT License",
"lines": 34,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | documentation |
geekcomputers/Python:BrowserHistory/rock_paper_scissors.py | """
Triple Round : Rock, Paper, Scissors Game (CLI Version)
Final round is the Winning Round
Author: Your Name
"""
import random
def get_user_choice():
    """Prompt the user until they enter rock, paper, or scissors.

    Uses a loop rather than recursion: the original re-called itself on every
    invalid entry, so sustained bad input could exhaust the call stack.
    """
    while True:
        choice = input("Enter your choice (rock, paper, scissors): ").lower()
        if choice in ["rock", "paper", "scissors"]:
            return choice
        print("Invalid choice! Please enter rock, paper, or scissors.")
def get_computer_choice():
    """Return a uniformly random move for the computer."""
    return random.choice(("rock", "paper", "scissors"))
def decide_winner(player, computer):
    """Return the round outcome for *player* versus *computer*."""
    beats = {"rock": "scissors", "paper": "rock", "scissors": "paper"}
    if player == computer:
        return "It's a draw!"
    if beats[player] == computer:
        return "You win!"
    return "Computer wins!"
def main():
    """Play three rounds; each round prompts the player and prints the result."""
    for i in range(1, 4):
        print(f"round -> {i}\n")
        user_choice = get_user_choice()
        computer_choice = get_computer_choice()
        print(f"Computer chose: {computer_choice}")
        print(f"Final result : {decide_winner(user_choice, computer_choice)}")
if __name__ == "__main__":
main()
| {
"repo_id": "geekcomputers/Python",
"file_path": "BrowserHistory/rock_paper_scissors.py",
"license": "MIT License",
"lines": 40,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:rock_paper_scissors.py | """
Rock, Paper, Scissors Game
Author: DEVANSH-GAJJAR
"""
import random
def get_user_choice():
    """Prompt the user until they enter rock, paper, or scissors.

    Uses a loop rather than recursion: the original re-called itself on every
    invalid entry, so sustained bad input could exhaust the call stack.
    """
    while True:
        choice = input("Enter your choice (rock, paper, scissors): ").lower()
        if choice in ["rock", "paper", "scissors"]:
            return choice
        print("Invalid choice! Please enter rock, paper, or scissors.")
def get_computer_choice():
    """Pick the computer's move uniformly at random."""
    moves = ("rock", "paper", "scissors")
    return random.choice(moves)
def decide_winner(player, computer):
    """Return the result string for one round of rock-paper-scissors."""
    if player == computer:
        return "It's a draw!"
    winning_pairs = {("rock", "scissors"), ("paper", "rock"), ("scissors", "paper")}
    return "You win!" if (player, computer) in winning_pairs else "Computer wins!"
def main():
    """Play a single round of rock-paper-scissors against the computer."""
    player_move = get_user_choice()
    cpu_move = get_computer_choice()
    print(f"Computer chose: {cpu_move}")
    print(decide_winner(player_move, cpu_move))
if __name__ == "__main__":
main()
| {
"repo_id": "geekcomputers/Python",
"file_path": "rock_paper_scissors.py",
"license": "MIT License",
"lines": 37,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:Collatz Sequence/Collatz Sequence.py | def collatz_sequence(n):
"""Generate and print the Collatz sequence for n."""
steps = [n]
while n != 1:
if n % 2 == 0:
n = n // 2
else:
n = 3 * n + 1
steps.append(n)
return steps
# --- Main Program ---
# Read one positive integer from the user and print its Collatz sequence
# step by step.
try:
    num = int(input("Enter a positive integer: "))
    if num <= 0:
        print("Please enter a positive number greater than 0.")
    else:
        sequence = collatz_sequence(num)
        print("\nCollatz sequence:")
        for i, value in enumerate(sequence, start=1):
            print(f"Step {i}: {value}")
except ValueError:
    # int() raises ValueError for non-integer input.
    print("Invalid input! Please enter an integer.")
| {
"repo_id": "geekcomputers/Python",
"file_path": "Collatz Sequence/Collatz Sequence.py",
"license": "MIT License",
"lines": 22,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:news_oversimplifier.py | # news_oversimplifier.py
# Python command-line tool that fetches recent news articles based on a search query using NewsAPI and summarizes the article content using extractive summarization. You can also save the summaries to a text file.
# (requires API key in .env file)
import requests
import os
import sys
from dotenv import load_dotenv
from summa.summarizer import summarize
def main():
# loads .env variables
load_dotenv()
API_KEY = os.getenv("NEWS_API_KEY")
# check validity of command-line arguments
try:
if len(sys.argv) == 2:
news_query = sys.argv[1]
else:
raise IndexError()
except IndexError:
sys.exit("Please provide correct number of command-line arguments")
try:
# get number of articles from user
while True:
try:
num_articles = int(input("Enter number of articles: "))
break
except ValueError:
continue
# fetch news articles based on user's query
articles = fetch_news(API_KEY, query=news_query, max_articles=num_articles)
# output printing title, summary and no. of words in the summary
for i, article in enumerate(articles):
capitalized_title = capitalize_title(article["title"])
print(f"\n{i + 1}. {capitalized_title}")
content = article.get("content") or article.get("description") or ""
if not content.strip():
print("No content to oversimplify.")
continue
summary = summarize_text(content) # returns summary
count = word_count(summary) # returns word count
print(f"\nOVERSIMPLIFIED:\n{summary}\n{count} words\n")
# ask user whether they want to save the output in a txt file
while True:
saving_status = (
input("Would you like to save this in a text file? (y/n): ")
.strip()
.lower()
)
if saving_status == "y":
save_summary(article["title"], summary)
break
elif saving_status == "n":
break
else:
print("Try again\n")
continue
except Exception as e:
print("ERROR:", e)
def word_count(text):  # pytest in test file
    """
    Count whitespace-separated words in the given text.

    args:
        text (str): Input string to count words from.
    returns:
        int: Number of words in the string.
    """
    words = text.split()
    return len(words)
def summarize_text(text, ratio=0.6):  # pytest in test file
    """
    Produce an extractive summary of *text* using the summa library.

    args:
        text (str): The input text to summarize.
        ratio (float): Fraction of the original text to keep in the summary.
    returns:
        str: The stripped summary; the original text when summa returns
             nothing; a fallback message for the known boilerplate intro.
    """
    summary = summarize(text, ratio=ratio)
    if summary.lower().startswith("hello, and welcome to decoder!"):
        return "No description available for this headline"
    if not summary:
        return text
    return summary.strip()
def capitalize_title(title):  # pytest in test file
    """
    Format an article title as uppercase with surrounding whitespace removed.

    args:
        title (str): The title to format.
    returns:
        str: Uppercased, stripped title.
    """
    return title.strip().upper()
def fetch_news(api_key, query, max_articles=5):  # no pytest
    """
    Fetch news articles from NewsAPI matching a query string.

    args:
        api_key (str): NewsAPI key loaded from environment.
        query (str): The keyword to search for in news articles.
        max_articles (int): Maximum number of articles to fetch.
    returns:
        list: List of dictionaries, each representing a news article.
    raises:
        Exception: If the API response status is not 'ok'.
    """
    # Let requests build the query string via `params`: this URL-encodes the
    # query (spaces, '&', '#', ...) which raw f-string interpolation did not,
    # so multi-word or punctuated queries no longer produce malformed URLs.
    url = "https://newsapi.org/v2/everything"
    params = {
        "q": query,
        "language": "en",
        "apiKey": api_key,
        "pageSize": max_articles,
    }
    response = requests.get(url, params=params)
    data = response.json()
    if data.get("status") != "ok":
        # Single formatted message instead of a (msg, detail) tuple payload.
        raise Exception(f"Failed to fetch news: {data.get('message')}")
    return data["articles"]
def save_summary(title, summary, path="summaries.txt"):  # no pytest
    """
    Append a title, its summary, and a divider line to a file.

    args:
        title (str): Title of the article.
        summary (str): Summarized text to save.
        path (str): File path to append to, i.e. 'summaries.txt'
    """
    entry = f"{title}\n{summary}\n{'=' * 60}\n"
    with open(path, "a", encoding="utf-8") as out:
        out.write(entry)
if __name__ == "__main__":
main()
| {
"repo_id": "geekcomputers/Python",
"file_path": "news_oversimplifier.py",
"license": "MIT License",
"lines": 120,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
geekcomputers/Python:palindrome.py | def is_palindrome(text):
text = text.lower()
cleaned = ""
for char in text:
if char.isalnum():
cleaned += char
reversed_text = cleaned[::-1]
return cleaned == reversed_text
# Script entry point: read one line and report whether it is a palindrome.
user_input = input("Enter a word or a sentence:")
if is_palindrome(user_input):
    print("It's a palindrome")
else:
    print("It's not a palindrome")
| {
"repo_id": "geekcomputers/Python",
"file_path": "palindrome.py",
"license": "MIT License",
"lines": 13,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:random_password_gen.py | """
random_password_gen.py
A script to generate strong random passwords.
Usage:
$ python random_password_gen.py
Author: Keshavraj Pore
"""
import random
import string
def generate_password(length=12):
    """Return a random password of *length* characters drawn from letters,
    digits and punctuation.

    Uses the ``secrets`` module rather than ``random``: passwords are
    security-sensitive and ``random``'s Mersenne Twister is predictable,
    while ``secrets`` draws from the OS CSPRNG.
    """
    import secrets  # local import keeps this fix self-contained

    characters = string.ascii_letters + string.digits + string.punctuation
    password = "".join(secrets.choice(characters) for _ in range(length))
    return password
def main():
    """Prompt for a length, generate a password, and append it to passwords.txt."""
    print("Random Password Generator")
    try:
        requested_length = int(input("Enter desired password length: "))
        if requested_length < 6:
            print(" Password length should be at least 6.")
            return
        new_password = generate_password(requested_length)
        print(f"\nGenerated Password: {new_password}")
        # Persist the generated password by appending to a local file.
        with open("passwords.txt", "a") as out_file:
            out_file.write(new_password + "\n")
        print(" Password saved to passwords.txt")
    except ValueError:
        print(" Please enter a valid number.")
if __name__ == "__main__":
main()
| {
"repo_id": "geekcomputers/Python",
"file_path": "random_password_gen.py",
"license": "MIT License",
"lines": 30,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:Trending youtube videos.py | """
Python program that uses the YouTube Data API to fetch the top 10 trending YouTube videos.
You’ll need to have an API key from Google Cloud Platform to use the YouTube Data API.
First, install the google-api-python-client library if you haven’t already:
pip install google-api-python-client
Replace 'YOUR_API_KEY' with your actual API key. This script will fetch and print the titles,
channels, and view counts of the top 10 trending YouTube videos in India.
You can change the regionCode to any other country code if needed.
Then, you can use the following code:
"""
from googleapiclient.discovery import build
# Replace with your own API key
API_KEY = "YOUR_API_KEY"
YOUTUBE_API_SERVICE_NAME = "youtube"
YOUTUBE_API_VERSION = "v3"
def get_trending_videos():
    """Print title, channel, and view count for the top trending videos."""
    youtube = build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION, developerKey=API_KEY)
    # Request the "mostPopular" chart with snippet + statistics parts.
    request = youtube.videos().list(
        part="snippet,statistics",
        chart="mostPopular",
        regionCode="IN",  # Change this to your region code
        maxResults=10,
    )
    response = request.execute()
    # Print the video details
    for item in response["items"]:
        snippet = item["snippet"]
        stats = item["statistics"]
        print(f"Title: {snippet['title']}\nChannel: {snippet['channelTitle']}\nViews: {stats['viewCount']}\n")
if __name__ == "__main__":
get_trending_videos()
| {
"repo_id": "geekcomputers/Python",
"file_path": "Trending youtube videos.py",
"license": "MIT License",
"lines": 33,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:Audio_Summarizer.py | import whisper
import re
import openai
import os
def transcript_generator():
    """Transcribe ./audio.mp4 with Whisper and pass the result to the summarizer."""
    # Load Whisper model
    model = whisper.load_model("base")
    # Transcribe audio file
    result = model.transcribe("audio.mp4")
    # Send the transcript to the summarizer
    provide_summarizer(result)
def provide_summarizer(Text):
    """Summarize a Whisper transcription via Groq's OpenAI-compatible API.

    Expects ``Text`` to be a Whisper result dict with a "text" key; writes
    the bullet-point summary to summary.txt (one "- " line per sentence).
    """
    # Set up Groq OpenAI-compatible API credentials
    openai.api_key = os.getenv(
        "OPENAI_API_KEY", "your-api-key-here"
    )  # Replace or set in environment
    openai.api_base = "https://api.groq.com/openai/v1"
    # Extract text from the Whisper result
    text_to_summarize = Text["text"]
    # Send the transcription to Groq for summarization
    # NOTE(review): this is the legacy (<1.0) openai client interface —
    # confirm the installed openai version still provides ChatCompletion.
    response = openai.ChatCompletion.create(
        model="llama3-8b-8192",
        messages=[
            {
                "role": "system",
                "content": "You are a helpful assistant who summarizes long text into bullet points.",
            },
            {
                "role": "user",
                "content": f"Summarize the following:\n\n{text_to_summarize}",
            },
        ],
    )
    # Split the response into sentences
    summary = re.split(r"(?<=[.!?]) +", response["choices"][0]["message"]["content"])
    # Save summary to file
    with open("summary.txt", "w+", encoding="utf-8") as file:
        for sentence in summary:
            cleaned = sentence.strip()
            if cleaned:
                file.write("- " + cleaned + "\n")
if __name__ == "__main__":
transcript_generator()
| {
"repo_id": "geekcomputers/Python",
"file_path": "Audio_Summarizer.py",
"license": "MIT License",
"lines": 43,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:Street_Fighter/src/fighter.py | import pygame
class Fighter:
    """One fighting-game character (player 1 or 2).

    Owns keyboard input handling, simple physics (gravity + screen
    clamping), a two-move attack system with cooldown, health, and
    sprite-sheet animation state. Player 1 is driven by WASD + R/T,
    player 2 by arrow keys + M/N (see move()).
    """
    def __init__(self, player, x, y, flip, data, sprite_sheet, animation_steps, sound):
        """Build a fighter; ``data`` is [frame_size, image_scale, (off_x, off_y)]."""
        self.player = player
        self.size = data[0]
        self.image_scale = data[1]
        self.offset = data[2]
        self.flip = flip
        self.animation_list = self.load_images(sprite_sheet, animation_steps)
        self.action = 0  # 0:idle #1:run #2:jump #3:attack1 #4: attack2 #5:hit #6:death
        self.frame_index = 0
        self.image = self.animation_list[self.action][self.frame_index]
        self.update_time = pygame.time.get_ticks()
        self.rect = pygame.Rect((x, y, 80, 180))
        self.vel_y = 0
        self.running = False
        self.jump = False
        self.attacking = False
        self.attack_type = 0
        self.attack_cooldown = 0
        self.attack_sound = sound
        self.hit = False
        self.health = 100
        self.alive = True
    def load_images(self, sprite_sheet, animation_steps):
        """Slice the sprite sheet into per-action frame lists.

        Row ``y`` of the sheet holds ``animation_steps[y]`` square frames of
        ``self.size`` pixels; every frame is scaled by ``self.image_scale``.
        """
        # extract images from spritesheet
        animation_list = []
        for y, animation in enumerate(animation_steps):
            temp_img_list = []
            for x in range(animation):
                temp_img = sprite_sheet.subsurface(
                    x * self.size, y * self.size, self.size, self.size
                )
                temp_img_list.append(
                    pygame.transform.scale(
                        temp_img,
                        (self.size * self.image_scale, self.size * self.image_scale),
                    )
                )
            animation_list.append(temp_img_list)
        return animation_list
    def move(self, screen_width, screen_height, target, round_over):
        """Process input, gravity, and screen clamping for one frame.

        ``target`` is the opposing Fighter (used for facing and attacks);
        ``round_over`` disables all controls.
        """
        SPEED = 10
        GRAVITY = 2
        dx = 0
        dy = 0
        self.running = False
        self.attack_type = 0
        # get keypresses
        key = pygame.key.get_pressed()
        # can only perform other actions if not currently attacking
        if self.attacking == False and self.alive == True and round_over == False:
            # check player 1 controls
            if self.player == 1:
                # movement
                if key[pygame.K_a]:
                    dx = -SPEED
                    self.running = True
                if key[pygame.K_d]:
                    dx = SPEED
                    self.running = True
                # jump
                if key[pygame.K_w] and self.jump == False:
                    self.vel_y = -30
                    self.jump = True
                # attack
                if key[pygame.K_r] or key[pygame.K_t]:
                    self.attack(target)
                    # determine which attack type was used
                    if key[pygame.K_r]:
                        self.attack_type = 1
                    if key[pygame.K_t]:
                        self.attack_type = 2
            # check player 2 controls
            if self.player == 2:
                # movement
                if key[pygame.K_LEFT]:
                    dx = -SPEED
                    self.running = True
                if key[pygame.K_RIGHT]:
                    dx = SPEED
                    self.running = True
                # jump
                if key[pygame.K_UP] and self.jump == False:
                    self.vel_y = -30
                    self.jump = True
                # attack
                if key[pygame.K_m] or key[pygame.K_n]:
                    self.attack(target)
                    # determine which attack type was used
                    if key[pygame.K_m]:
                        self.attack_type = 1
                    if key[pygame.K_n]:
                        self.attack_type = 2
        # apply gravity
        self.vel_y += GRAVITY
        dy += self.vel_y
        # ensure player stays on screen
        if self.rect.left + dx < 0:
            dx = -self.rect.left
        if self.rect.right + dx > screen_width:
            dx = screen_width - self.rect.right
        if self.rect.bottom + dy > screen_height - 110:
            self.vel_y = 0
            self.jump = False
            dy = screen_height - 110 - self.rect.bottom
        # ensure players face each other
        if target.rect.centerx > self.rect.centerx:
            self.flip = False
        else:
            self.flip = True
        # apply attack cooldown
        if self.attack_cooldown > 0:
            self.attack_cooldown -= 1
        # update player position
        self.rect.x += dx
        self.rect.y += dy
    # handle animation updates
    def update(self):
        """Advance animation state from the current flags; call once per frame."""
        # check what action the player is performing
        if self.health <= 0:
            self.health = 0
            self.alive = False
            self.update_action(6)  # 6:death
        elif self.hit:
            self.update_action(5)  # 5:hit
        elif self.attacking:
            if self.attack_type == 1:
                self.update_action(3)  # 3:attack1
            elif self.attack_type == 2:
                self.update_action(4)  # 4:attack2
        elif self.jump:
            self.update_action(2)  # 2:jump
        elif self.running:
            self.update_action(1)  # 1:run
        else:
            self.update_action(0)  # 0:idle
        animation_cooldown = 50
        # update image
        self.image = self.animation_list[self.action][self.frame_index]
        # check if enough time has passed since the last update
        if pygame.time.get_ticks() - self.update_time > animation_cooldown:
            self.frame_index += 1
            self.update_time = pygame.time.get_ticks()
        # check if the animation has finished
        if self.frame_index >= len(self.animation_list[self.action]):
            # if the player is dead then end the animation
            if not self.alive:
                self.frame_index = len(self.animation_list[self.action]) - 1
            else:
                self.frame_index = 0
                # check if an attack was executed
                if self.action == 3 or self.action == 4:
                    self.attacking = False
                    self.attack_cooldown = 20
                # check if damage was taken
                if self.action == 5:
                    self.hit = False
                    # if the player was in the middle of an attack, then the attack is stopped
                    self.attacking = False
                    self.attack_cooldown = 20
    def attack(self, target):
        """If off cooldown, play the attack sound and hit-test a box extending
        two body-widths toward the direction the fighter is facing."""
        if self.attack_cooldown == 0:
            # execute attack
            self.attacking = True
            self.attack_sound.play()
            attacking_rect = pygame.Rect(
                self.rect.centerx - (2 * self.rect.width * self.flip),
                self.rect.y,
                2 * self.rect.width,
                self.rect.height,
            )
            if attacking_rect.colliderect(target.rect):
                target.health -= 10
                target.hit = True
    def update_action(self, new_action):
        """Switch to *new_action* and restart its animation if it changed."""
        # check if the new action is different to the previous one
        if new_action != self.action:
            self.action = new_action
            # update the animation settings
            self.frame_index = 0
            self.update_time = pygame.time.get_ticks()
    def draw(self, surface):
        """Blit the current (optionally mirrored) frame onto *surface*,
        shifted by the scaled sprite offsets."""
        img = pygame.transform.flip(self.image, self.flip, False)
        surface.blit(
            img,
            (
                self.rect.x - (self.offset[0] * self.image_scale),
                self.rect.y - (self.offset[1] * self.image_scale),
            ),
        )
| {
"repo_id": "geekcomputers/Python",
"file_path": "Street_Fighter/src/fighter.py",
"license": "MIT License",
"lines": 191,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
geekcomputers/Python:Street_Fighter/src/main.py | import math
import pygame
from pygame import mixer
import cv2
import numpy as np
import os
import sys
from fighter import Fighter
# Helper Function for Bundled Assets
def resource_path(relative_path):
    """Resolve *relative_path* against the PyInstaller bundle dir, or the
    current working directory when not running from a bundle."""
    bundle_dir = getattr(sys, "_MEIPASS", None)
    if bundle_dir is None:
        bundle_dir = os.path.abspath(".")
    return os.path.join(bundle_dir, relative_path)
mixer.init()
pygame.init()
# Constants
info = pygame.display.Info()
SCREEN_WIDTH = info.current_w
SCREEN_HEIGHT = info.current_h
FPS = 60
ROUND_OVER_COOLDOWN = 3000
# Colors
RED = (255, 0, 0)
YELLOW = (255, 255, 0)
WHITE = (255, 255, 255)
BLACK = (0, 0, 0)
BLUE = (0, 0, 255)
GREEN = (0, 255, 0)
# Initialize Game Window
screen = pygame.display.set_mode((SCREEN_WIDTH, SCREEN_HEIGHT), pygame.NOFRAME)
pygame.display.set_caption("Street Fighter")
clock = pygame.time.Clock()
# Load Assets
bg_image = cv2.imread(resource_path("assets/images/bg1.jpg"))
victory_img = pygame.image.load(
resource_path("assets/images/victory.png")
).convert_alpha()
warrior_victory_img = pygame.image.load(
resource_path("assets/images/warrior.png")
).convert_alpha()
wizard_victory_img = pygame.image.load(
resource_path("assets/images/wizard.png")
).convert_alpha()
# Fonts
menu_font = pygame.font.Font(resource_path("assets/fonts/turok.ttf"), 50)
menu_font_title = pygame.font.Font(
resource_path("assets/fonts/turok.ttf"), 100
) # Larger font for title
count_font = pygame.font.Font(resource_path("assets/fonts/turok.ttf"), 80)
score_font = pygame.font.Font(resource_path("assets/fonts/turok.ttf"), 30)
# Music and Sounds
pygame.mixer.music.load(resource_path("assets/audio/music.mp3"))
pygame.mixer.music.set_volume(0.5)
pygame.mixer.music.play(-1, 0.0, 5000)
sword_fx = pygame.mixer.Sound(resource_path("assets/audio/sword.wav"))
sword_fx.set_volume(0.5)
magic_fx = pygame.mixer.Sound(resource_path("assets/audio/magic.wav"))
magic_fx.set_volume(0.75)
# Load Fighter Spritesheets
warrior_sheet = pygame.image.load(
resource_path("assets/images/warrior.png")
).convert_alpha()
wizard_sheet = pygame.image.load(
resource_path("assets/images/wizard.png")
).convert_alpha()
# Define Animation Steps
WARRIOR_ANIMATION_STEPS = [10, 8, 1, 7, 7, 3, 7]
WIZARD_ANIMATION_STEPS = [8, 8, 1, 8, 8, 3, 7]
# Fighter Data
WARRIOR_SIZE = 162
WARRIOR_SCALE = 4
WARRIOR_OFFSET = [72, 46]
WARRIOR_DATA = [WARRIOR_SIZE, WARRIOR_SCALE, WARRIOR_OFFSET]
WIZARD_SIZE = 250
WIZARD_SCALE = 3
WIZARD_OFFSET = [112, 97]
WIZARD_DATA = [WIZARD_SIZE, WIZARD_SCALE, WIZARD_OFFSET]
# Game Variables
score = [0, 0] # Player Scores: [P1, P2]
def draw_text(text, font, color, x, y):
    """Render *text* with *font* in *color* and blit it at (x, y)."""
    rendered = font.render(text, True, color)
    screen.blit(rendered, (x, y))
def blur_bg(image):
    """Return a Gaussian-blurred copy of *image* (HxWx3 numpy array).

    NOTE(review): the RGB2BGR / BGR2RGB round-trip cancels out (each just
    reverses the channel order), so the net effect is only the 15x15 blur —
    confirm the intended channel handling for cv2.imread's BGR output.
    """
    image_bgr = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
    blurred_image = cv2.GaussianBlur(image_bgr, (15, 15), 0)
    return cv2.cvtColor(blurred_image, cv2.COLOR_BGR2RGB)
def draw_bg(image, is_game_started=False):
if not is_game_started:
blurred_bg = blur_bg(image)
blurred_bg = pygame.surfarray.make_surface(np.transpose(blurred_bg, (1, 0, 2)))
blurred_bg = pygame.transform.scale(blurred_bg, (SCREEN_WIDTH, SCREEN_HEIGHT))
screen.blit(blurred_bg, (0, 0))
else:
image = pygame.surfarray.make_surface(np.transpose(image, (1, 0, 2)))
image = pygame.transform.scale(image, (SCREEN_WIDTH, SCREEN_HEIGHT))
screen.blit(image, (0, 0))
def draw_button(text, font, text_col, button_col, x, y, width, height):
    """Draw a filled, white-bordered button with centered text; return its rect."""
    rect = pygame.Rect(x, y, width, height)
    pygame.draw.rect(screen, button_col, rect)
    pygame.draw.rect(screen, WHITE, rect, 2)
    label = font.render(text, True, text_col)
    label_rect = label.get_rect(center=(x + width // 2, y + height // 2))
    screen.blit(label, label_rect)
    return rect
def victory_screen(winner_img):
start_time = pygame.time.get_ticks()
while pygame.time.get_ticks() - start_time < ROUND_OVER_COOLDOWN:
resized_victory_img = pygame.transform.scale(
victory_img, (victory_img.get_width() * 2, victory_img.get_height() * 2)
)
screen.blit(
resized_victory_img,
(
SCREEN_WIDTH // 2 - resized_victory_img.get_width() // 2,
SCREEN_HEIGHT // 2 - resized_victory_img.get_height() // 2 - 50,
),
)
screen.blit(
winner_img,
(
SCREEN_WIDTH // 2 - winner_img.get_width() // 2,
SCREEN_HEIGHT // 2 - winner_img.get_height() // 2 + 100,
),
)
pygame.display.update()
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
exit()
def draw_gradient_text(text, font, x, y, colors):
    """
    Layer *text* once per color with a small diagonal offset to fake a gradient.
    """
    step = 2
    for layer, layer_color in enumerate(colors):
        surface = font.render(text, True, layer_color)
        screen.blit(surface, (x + layer * step, y + layer * step))
def main_menu():
animation_start_time = pygame.time.get_ticks()
while True:
draw_bg(bg_image, is_game_started=False)
elapsed_time = (pygame.time.get_ticks() - animation_start_time) / 1000
scale_factor = 1 + 0.05 * math.sin(elapsed_time * 2 * math.pi) # Slight scaling
scaled_font = pygame.font.Font(
"assets/fonts/turok.ttf", int(100 * scale_factor)
)
title_text = "STREET FIGHTER"
colors = [BLUE, GREEN, YELLOW]
shadow_color = BLACK
title_x = SCREEN_WIDTH // 2 - scaled_font.size(title_text)[0] // 2
title_y = SCREEN_HEIGHT // 6
shadow_offset = 5
draw_text(
title_text,
scaled_font,
shadow_color,
title_x + shadow_offset,
title_y + shadow_offset,
)
draw_gradient_text(title_text, scaled_font, title_x, title_y, colors)
button_width = 280
button_height = 60
button_spacing = 30
start_button_y = (
SCREEN_HEIGHT // 2 - (button_height + button_spacing) * 1.5 + 50
)
scores_button_y = (
SCREEN_HEIGHT // 2 - (button_height + button_spacing) * 0.5 + 50
)
exit_button_y = SCREEN_HEIGHT // 2 + (button_height + button_spacing) * 0.5 + 50
start_button = draw_button(
"START GAME",
menu_font,
BLACK,
GREEN,
SCREEN_WIDTH // 2 - button_width // 2,
start_button_y,
button_width,
button_height,
)
scores_button = draw_button(
"SCORES",
menu_font,
BLACK,
GREEN,
SCREEN_WIDTH // 2 - button_width // 2,
scores_button_y,
button_width,
button_height,
)
exit_button = draw_button(
"EXIT",
menu_font,
BLACK,
GREEN,
SCREEN_WIDTH // 2 - button_width // 2,
exit_button_y,
button_width,
button_height,
)
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
exit()
if event.type == pygame.MOUSEBUTTONDOWN:
if start_button.collidepoint(event.pos):
return "START"
if scores_button.collidepoint(event.pos):
return "SCORES"
if exit_button.collidepoint(event.pos):
pygame.quit()
exit()
pygame.display.update()
clock.tick(FPS)
def scores_screen():
while True:
draw_bg(bg_image)
scores_title = "SCORES"
draw_text(
scores_title,
menu_font_title,
RED,
SCREEN_WIDTH // 2 - menu_font_title.size(scores_title)[0] // 2,
50,
)
score_font_large = pygame.font.Font(
"assets/fonts/turok.ttf", 60
) # Increased size for scores
p1_text = f"P1: {score[0]}"
p2_text = f"P2: {score[1]}"
shadow_offset = 5
p1_text_x = SCREEN_WIDTH // 2 - score_font_large.size(p1_text)[0] // 2
p1_text_y = SCREEN_HEIGHT // 2 - 50
draw_text(
p1_text,
score_font_large,
BLACK,
p1_text_x + shadow_offset,
p1_text_y + shadow_offset,
) # Shadow
draw_gradient_text(
p1_text, score_font_large, p1_text_x, p1_text_y, [BLUE, GREEN]
) # Gradient
p2_text_x = SCREEN_WIDTH // 2 - score_font_large.size(p2_text)[0] // 2
p2_text_y = SCREEN_HEIGHT // 2 + 50
draw_text(
p2_text,
score_font_large,
BLACK,
p2_text_x + shadow_offset,
p2_text_y + shadow_offset,
) # Shadow
draw_gradient_text(
p2_text, score_font_large, p2_text_x, p2_text_y, [RED, YELLOW]
) # Gradient
return_button = draw_button(
"RETURN TO MAIN MENU",
menu_font,
BLACK,
GREEN,
SCREEN_WIDTH // 2 - 220,
700,
500,
50,
)
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
exit()
if event.type == pygame.MOUSEBUTTONDOWN:
if return_button.collidepoint(event.pos):
return
pygame.display.update()
clock.tick(FPS)
def reset_game():
    """Re-create both fighters at their spawn points for a new round."""
    global fighter_1, fighter_2
    fighter_1 = Fighter(
        1,
        200,
        310,
        False,
        WARRIOR_DATA,
        warrior_sheet,
        WARRIOR_ANIMATION_STEPS,
        sword_fx,
    )
    fighter_2 = Fighter(
        2, 700, 310, True, WIZARD_DATA, wizard_sheet, WIZARD_ANIMATION_STEPS, magic_fx
    )
def draw_health_bar(health, x, y):
    """Draw a 200px-wide health bar at (x, y); *health* is expected in 0-100."""
    frame = (x, y, 200, 20)
    pygame.draw.rect(screen, BLACK, frame)
    if health > 0:
        # Two horizontal pixels of fill per health point.
        pygame.draw.rect(screen, RED, (x, y, health * 2, 20))
    pygame.draw.rect(screen, WHITE, frame, 2)
def countdown():
countdown_font = pygame.font.Font("assets/fonts/turok.ttf", 100)
countdown_texts = ["3", "2", "1", "FIGHT!"]
for text in countdown_texts:
draw_bg(bg_image, is_game_started=True)
text_img = countdown_font.render(text, True, RED)
text_width = text_img.get_width()
x_pos = (SCREEN_WIDTH - text_width) // 2
draw_text(text, countdown_font, RED, x_pos, SCREEN_HEIGHT // 2 - 50)
pygame.display.update()
pygame.time.delay(1000)
def game_loop():
global score
reset_game()
round_over = False
winner_img = None
game_started = True
countdown()
while True:
draw_bg(bg_image, is_game_started=game_started)
draw_text(f"P1: {score[0]}", score_font, RED, 20, 20)
draw_text(f"P2: {score[1]}", score_font, RED, SCREEN_WIDTH - 220, 20)
draw_health_bar(fighter_1.health, 20, 50)
draw_health_bar(fighter_2.health, SCREEN_WIDTH - 220, 50)
exit_button = draw_button(
"MAIN MENU", menu_font, BLACK, YELLOW, SCREEN_WIDTH // 2 - 150, 20, 300, 50
)
if not round_over:
fighter_1.move(SCREEN_WIDTH, SCREEN_HEIGHT, fighter_2, round_over)
fighter_2.move(SCREEN_WIDTH, SCREEN_HEIGHT, fighter_1, round_over)
fighter_1.update()
fighter_2.update()
if not fighter_1.alive:
score[1] += 1
round_over = True
winner_img = wizard_victory_img
elif not fighter_2.alive:
score[0] += 1
round_over = True
winner_img = warrior_victory_img
else:
victory_screen(winner_img)
return
fighter_1.draw(screen)
fighter_2.draw(screen)
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
exit()
if event.type == pygame.MOUSEBUTTONDOWN:
if exit_button.collidepoint(event.pos):
return
pygame.display.update()
clock.tick(FPS)
while True:
menu_selection = main_menu()
if menu_selection == "START":
game_loop()
elif menu_selection == "SCORES":
scores_screen()
| {
"repo_id": "geekcomputers/Python",
"file_path": "Street_Fighter/src/main.py",
"license": "MIT License",
"lines": 355,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
geekcomputers/Python:Emoji Dictionary/QT_GUI.py | # -*- coding: utf-8 -*-
import sys
from PyQt5.QtCore import *
from PyQt5.QtGui import *
from PyQt5.QtWidgets import *
from PyQt5 import uic
from emoji import demojize
import os
class MainWindow(QMainWindow):
    """Emoji dictionary main window.

    Lets the user type (or pick) emojis, then shows their textual meaning
    via ``emoji.demojize``. The picker is a grid of fixed-size buttons that
    can be toggled visible/hidden.
    """

    def __init__(self):
        """Load the Qt Designer layout and wire up all widgets."""
        super(MainWindow, self).__init__()
        # Load the UI file sitting next to this script.
        uic.loadUi(os.path.join(os.path.dirname(__file__), "QT_GUI.ui"), self)
        # pushButton_4 = quit, pushButton_2 = search, pushButton_3 = clear
        # (names come from the .ui file).
        self.pushButton_4.clicked.connect(self.close)
        self.pushButton_2.clicked.connect(lambda: search_emoji())
        self.pushButton_3.clicked.connect(lambda: clear_text())
        # Picker contents: three rows of 21 emojis plus one row of 10 faces.
        cells = [
            ["🐒", "🐕", "🐎", "🐪", "🐁", "🐘", "🦘", "🦈", "🐓", "🐝", "👀",
             "🦴", "👩🏿", "🤝", "🧑", "🏾", "👱🏽", "♀", "🎞", "🎨", "⚽"],
            ["🍕", "🍗", "🍜", "☕", "🍴", "🍉", "🍓", "🌴", "🌵", "🛺", "🚲",
             "🛴", "🚉", "🚀", "✈", "🛰", "🚦", "🏳", "🌈", "🌎", "🧭"],
            ["🔥", "❄", "🌟", "🌞", "🌛", "🌝", "🌧", "🧺", "🧷", "🪒", "⛲",
             "🗼", "🕌", "👁", "🗨", "💬", "™", "💯", "🔕", "💥", "❤"],
            ["😀", "🥰", "😴", "🤓", "🤮", "🤬", "😨", "🤑", "😫", "😎"],
        ]

        def emoji_wight_btn():
            # Toggle visibility of the emoji picker panel.
            if self.emoji_widget.isVisible():
                self.emoji_widget.hide()
            else:
                self.emoji_widget.show()

        def search_emoji():
            # Translate the typed emoji(s) into their text names via demojize.
            word = self.lineEdit.text()
            print(f"Field Text: {word}")
            if word == "":
                self.textEdit.setText("You have entered no emoji.")
            else:
                means = demojize(word)
                # Reformat demojize output (":name::name:") one name per line.
                self.textEdit.setText(
                    "Meaning of Emoji : "
                    + str(word)
                    + "\n\n"
                    + means.replace("::", ":\n: ")
                )

        def add_input_emoji(emoji):
            # Append a picked emoji to the search field.
            self.lineEdit.setText(self.lineEdit.text() + emoji)

        def clear_text():
            # Reset both the input field and the result pane.
            self.lineEdit.setText("")
            self.textEdit.setText("")

        # Build the (initially hidden) picker grid inside frame_2.
        self.emoji_buttons = []
        self.emoji_layout = QGridLayout()
        self.emoji_widget = QWidget()
        self.emoji_widget.setLayout(self.emoji_layout)
        self.frame_2.layout().addWidget(self.emoji_widget)
        self.emoji_widget.hide()
        self.pushButton.clicked.connect(lambda: emoji_wight_btn())
        # One fixed-size button per emoji, positioned by (row, column).
        for row_idx, row in enumerate(cells):
            for col_idx, emoji in enumerate(row):
                button = QPushButton(emoji)
                button.setFixedSize(40, 40)
                button.setFont(QFont("Arial", 20))
                button.setStyleSheet("""
                    QPushButton {
                        background-color: #ffffff;
                        border: 1px solid #e0e0e0;
                        border-radius: 5px;
                    }
                    QPushButton:hover {
                        background-color: #f0f0f0;
                    }
                """)
                # Bind the emoji via a default argument to avoid the
                # late-binding-closure pitfall inside the loop.
                button.clicked.connect(lambda checked, e=emoji: add_input_emoji(e))
                self.emoji_layout.addWidget(button, row_idx, col_idx)
                self.emoji_buttons.append(button)
if __name__ == "__main__":
    # Bootstrap the Qt application and hand control to its event loop;
    # propagate the loop's exit status to the shell.
    application = QApplication(sys.argv)
    main_window = MainWindow()
    main_window.show()
    sys.exit(application.exec_())
| {
"repo_id": "geekcomputers/Python",
"file_path": "Emoji Dictionary/QT_GUI.py",
"license": "MIT License",
"lines": 141,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:CSV_file.py | import pandas as pd
# Loading the dataset (adjust the path for your environment).
df = pd.read_csv(
    r"c:\PROJECT\Drug_Recommendation_System\drug_recommendation_system\Drugs_Review_Datasets.csv"
)
print(df)  # prints Dataset (pandas truncates very large frames)
# Basic exploratory functions
print(df.tail())      # last 5 rows
print(df.head())      # first 5 rows
print(df.info())      # column dtypes and non-null counts
print(df.describe())  # summary statistics for numeric columns
print(df.columns)     # FIX: was `df.column`, which raises AttributeError
print(df.shape)       # FIX: `shape` is an attribute, not a method; `df.shape()` raises TypeError
| {
"repo_id": "geekcomputers/Python",
"file_path": "CSV_file.py",
"license": "MIT License",
"lines": 13,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:Image-watermarker/app.py | import customtkinter as ctk
from customtkinter import filedialog
from CTkMessagebox import CTkMessagebox
from PIL import Image, ImageTk
from watermark import Watermark
import pyglet
from tkinter import colorchooser
# ------------------- Create Window -----------------
# Register bundled fonts so pyglet/tk can render them by family name.
pyglet.font.add_directory("fonts")
window = ctk.CTk()
window.geometry("810x525")
window.title("Grenze")
# Module-level state shared by the handlers below:
text_label = None      # canvas id of the draggable watermark text, if any
loaded_image = False   # PhotoImage currently shown on the canvas (falsy until loaded)
logo = None            # canvas id of the draggable logo, if any
img = None             # original full-resolution PIL image
user_text = None       # last text entered by the user
logo_path = None       # path of the chosen logo file
color_code = "white"   # current watermark text color (hex or name)
font_values = ["Decorative", "MartianMono", "DancingScript", "AkayaKanadaka"]
# -------------------------- LOAD IMAGE AND CHECK FILE TYPE ON IMAGE CANVAS (use Frame) --------------
def load_image():
    """Ask for an image file, show it on the canvas, and resize the window.

    Images larger than 800x600 are scaled down (aspect ratio preserved) for
    display only; the original `img` is kept for full-resolution saving.
    """
    global img, loaded_image, image_canvas
    file_path = filedialog.askopenfilename(
        filetypes=[("Image files", "*.jpg *.jpeg *.png *.bmp")]
    )
    if not file_path:
        # User cancelled the dialog.
        return
    img = Image.open(file_path)
    max_width, max_height = 800, 600
    if img.width > max_width or img.height > max_height:
        # Scale to fit inside the display bounds, preserving aspect ratio.
        ratio = min(max_width / img.width, max_height / img.height)
        resize_img = img.resize(
            (int(img.width * ratio), int(img.height * ratio)), Image.Resampling.LANCZOS
        )
        loaded_image = ImageTk.PhotoImage(resize_img)
        # +300 leaves room for the controls column; +30/+50 for padding.
        window.geometry(f"{resize_img.width + 300 + 30}x{resize_img.height + 50}")
        image_canvas.config(width=resize_img.width, height=resize_img.height)
        image_canvas.grid(row=0, column=1, padx=20, pady=20, columnspan=2)
        image_canvas.create_image(0, 0, anchor="nw", image=loaded_image)
    else:
        loaded_image = ImageTk.PhotoImage(img)
        window.geometry(f"{img.width + 300}x{img.height + 50}")
        image_canvas.config(width=img.width, height=img.height)
        image_canvas.grid(row=0, column=1, padx=20, pady=20, columnspan=2)
        image_canvas.create_image(0, 0, anchor="nw", image=loaded_image)
# ------------------------------------- DRAG AND DROP FEATURE --------
# Last clamped drag position, read back by save_image() to place the
# watermark on the full-resolution image.
start_x = 0  # NOTE(review): start_x/start_y appear unused in this file — confirm before removing
start_y = 0
new_x = 0
new_y = 0
def move_logo(e):
    """Drag handler: move the logo with the cursor, clamped to the canvas."""
    global logo, new_x, new_y
    canvas_w = image_canvas.winfo_width()
    canvas_h = image_canvas.winfo_height()
    left, top, right, bottom = image_canvas.bbox(logo)
    logo_w, logo_h = right - left, bottom - top
    new_x, new_y = e.x, e.y
    # Clamp horizontally: left edge takes priority, as in a plain if/elif.
    if new_x < 0:
        new_x = 0
    elif new_x + logo_w > canvas_w:
        new_x = canvas_w - logo_w
    # Clamp vertically the same way.
    if new_y < 0:
        new_y = 0
    elif new_y + logo_h > canvas_h:
        new_y = canvas_h - logo_h
    image_canvas.coords(logo, new_x, new_y)
def move_text(e):
    """Drag handler: move the watermark text with the cursor, clamped to the canvas."""
    global text_label, new_x, new_y
    canvas_w = image_canvas.winfo_width()
    canvas_h = image_canvas.winfo_height()
    left, top, right, bottom = image_canvas.bbox(text_label)
    text_w, text_h = right - left, bottom - top
    new_x, new_y = e.x, e.y
    # Clamp horizontally: left edge takes priority, as in a plain if/elif.
    if new_x < 0:
        new_x = 0
    elif new_x + text_w > canvas_w:
        new_x = canvas_w - text_w
    # Clamp vertically the same way.
    if new_y < 0:
        new_y = 0
    elif new_y + text_h > canvas_h:
        new_y = canvas_h - text_h
    image_canvas.coords(text_label, new_x, new_y)
def choose_color():
    """Open a color picker and store the chosen hex code in `color_code`.

    Fix: `colorchooser.askcolor` returns (None, None) when the dialog is
    cancelled; previously that overwrote `color_code` with None, which later
    crashed text rendering (fill=None). Keep the previous color on cancel.
    """
    global color_code
    picked = colorchooser.askcolor(title="Choose Color")
    if picked and picked[1] is not None:
        color_code = picked[1]
# ----------------- ADD TEXT ON CANVAS-----------------
def add_text_on_canvas():
    """Validate inputs, then draw the entered text on the canvas as a
    draggable watermark preview.

    Preconditions enforced with popups: a known font must be selected, a
    logo must not already be in use, an image must be loaded, and the text
    field must be non-empty.
    """
    global text_label, loaded_image, user_text, img, font_values
    user_text = text.get()
    font_key = font_style.get()
    if font_key not in font_values:
        # Only fonts bundled under fonts/ are usable.
        CTkMessagebox(
            title="Font Not Available",
            message=f"{font_key} FileNotFoundError.",
        )
        return
    if logo is not None:
        # Text and logo watermarks are mutually exclusive.
        CTkMessagebox(title="Logo Use", message="Logo is in use.")
        return
    if text_label is not None:
        image_canvas.delete(text_label)  # Delete previous text_label
    if loaded_image:
        if user_text:
            selected_size = int(font_size.get())
            # Register the TTF so tk can resolve the family name.
            pyglet.font.add_file(f"fonts/{font_key}.ttf")
            text_label = image_canvas.create_text(
                10,
                10,
                text=user_text,
                font=(font_key, selected_size),
                fill=color_code,
                anchor="nw",
            )
            # Make the preview draggable with the left mouse button.
            image_canvas.tag_bind(text_label, "<B1-Motion>", move_text)
        else:
            CTkMessagebox(title="Error", message="Text Filed Empty.", icon="cancel")
    else:
        CTkMessagebox(title="Error", message="Image Not Found. Upload Image.")
# ----------------------TODO UPLOAD LOGO -----------
def upload_logo():
    """Pick a logo image and place it on the canvas as a draggable preview.

    Mutually exclusive with the text watermark; requires an image to be
    loaded first. The preview is fixed at 160x150 px regardless of the
    logo's native size.
    """
    global loaded_image, logo, logo_path, text_label
    if text_label is not None:
        CTkMessagebox(
            title="Text In Use", message="You are using text. Can't use logo."
        )
        return
    if logo is not None:
        # Replace any previously placed logo.
        image_canvas.delete(logo)
    if loaded_image:
        logo_path = filedialog.askopenfilename(
            filetypes=[("Image files", "*.jpg *.jpeg *.png *.bmp")],
        )
        if logo_path:
            # RGBA keeps any transparency in the logo.
            logo_image = Image.open(logo_path).convert("RGBA")
            resize = logo_image.resize((160, 150))
            logo_photo = ImageTk.PhotoImage(resize)
            logo = image_canvas.create_image(0, 0, anchor="nw", image=logo_photo)
            image_canvas.tag_bind(logo, "<B1-Motion>", move_logo)
            # Keep a reference on the canvas so the PhotoImage is not
            # garbage-collected (tk only holds a weak reference).
            image_canvas.logo_photo = logo_photo
    else:
        CTkMessagebox(
            title="Image Field Empty",
            message="Image field empty. Click on the open image button to add the image to the canvas.",
            icon="cancel",
        )
# ---------------------------- TODO SAVE FUNCTION ---------------
# Single helper instance used for all watermark/save operations.
watermark = Watermark()


def save_image():
    """Render the previewed watermark onto the full-resolution image and save.

    Maps the last drag position (new_x, new_y) from canvas coordinates to
    original-image coordinates, then delegates drawing and the save dialog
    to the Watermark helper. Text takes priority over logo if both exist.
    """
    global text_label, loaded_image, file_path, user_text, img, new_x, new_y, logo
    if loaded_image and text_label:
        width, height = img.size
        canvas_width = image_canvas.winfo_width()
        canvas_height = image_canvas.winfo_height()
        # Canvas-to-image scale factors (preview may be downscaled).
        scale_x = width / canvas_width
        scale_y = height / canvas_height
        # -10 compensates the preview's initial (10, 10) anchor offset.
        image_x = int(new_x * scale_x) - 10
        image_y = int(new_y * scale_y) - 10
        # NOTE(review): the +6 font-size fudge approximates the preview's
        # apparent size on the full image — confirm visually.
        adjusted_font_size = int(int(font_size.get()) * min(scale_x, scale_y)) + 6
        watermarked_image = watermark.add_text_watermark(
            image=img,
            text=user_text,
            position=(image_x, image_y),
            text_color=color_code,
            font_style=f"fonts/{font_style.get()}.ttf",
            font_size=adjusted_font_size,
        )
        watermark.save_image(watermarked_image)
    elif loaded_image and logo_path is not None:
        original_image = img.convert("RGBA")
        canvas_width = image_canvas.winfo_width()
        canvas_height = image_canvas.winfo_height()
        logo_image = Image.open(logo_path)
        # Logo scaled relative to the output image, not the 160x150 preview.
        logo_resized = logo_image.resize(
            (
                int(original_image.width * 0.2) + 50,
                int(original_image.height * 0.2),
            )
        )
        image_width, image_height = original_image.size
        logo_position = (
            int(new_x * image_width / canvas_width),
            int(new_y * image_height / canvas_height),
        )
        watermark.add_logo(
            image=original_image, logo=logo_resized, position=logo_position
        )
        watermark.save_image(original_image)
# -------------------Tab View AND OPEN IMAGE-----------
tabview = ctk.CTkTabview(window, corner_radius=10, height=400)
tabview.grid(row=0, column=0, padx=10)
tab_1 = tabview.add("Text Watermark")
tab_2 = tabview.add("Logo Watermark")
# --------------- TEXT WATERMARK TAB_1 VIEW ----------
tab_1.grid_columnconfigure(0, weight=1)
tab_1.grid_columnconfigure(1, weight=1)
text = ctk.CTkEntry(master=tab_1, placeholder_text="Entry Text", width=200)
text.grid(row=2, column=0, padx=20, pady=10)
font_style = ctk.CTkComboBox(
master=tab_1,
values=font_values,
width=200,
)
font_style.grid(row=3, column=0, pady=10)
font_size = ctk.CTkComboBox(
master=tab_1,
values=[
"10",
"12",
"14",
"20",
],
width=200,
)
font_size.grid(row=4, column=0, pady=10)
font_size.set("10")
add_text = ctk.CTkButton(
master=tab_1, text="Add Text", width=200, command=add_text_on_canvas
)
add_text.grid(row=5, column=0, pady=10)
open_image = ctk.CTkButton(
master=tab_1, text="Open Image", width=200, corner_radius=10, command=load_image
)
open_image.grid(row=7, column=0, pady=10)
open_image2 = ctk.CTkButton(
master=tab_2, text="Open Image", width=200, corner_radius=10, command=load_image
)
open_image2.grid(row=2, column=0, padx=20, pady=10)
pick_color = ctk.CTkButton(
master=tab_1, text="Pick Color", width=200, corner_radius=10, command=choose_color
)
pick_color.grid(row=6, column=0, padx=10, pady=10)
# ------------- LOGO WATERMARK SESSION TAB_2 ---------------
logo_upload = ctk.CTkButton(
master=tab_2, text="Upload Logo", width=200, corner_radius=10, command=upload_logo
)
logo_upload.grid(row=3, column=0, pady=10)
# ----------------- ImageFrame ---------------------
image_canvas = ctk.CTkCanvas(
width=500,
height=360,
)
image_canvas.config(bg="gray24", highlightthickness=0, borderwidth=0)
image_canvas.grid(row=0, column=1, columnspan=2)
# -------- SAVE BUTTON --------
save_image_button = ctk.CTkButton(window, text="Save Image", command=save_image)
save_image_button.grid(pady=10)
window.mainloop()
| {
"repo_id": "geekcomputers/Python",
"file_path": "Image-watermarker/app.py",
"license": "MIT License",
"lines": 251,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
geekcomputers/Python:Image-watermarker/watermark.py | from PIL import ImageDraw, ImageFont
from customtkinter import filedialog
from CTkMessagebox import CTkMessagebox
class Watermark:
    """Apply text or logo watermarks to PIL images and save the result."""

    def add_text_watermark(
        self, image, text, text_color, font_style, font_size, position=(0, 0)
    ):
        """Draw `text` onto `image` in place and return it.

        Args:
            image: PIL image to draw on (modified in place).
            text: Watermark text.
            text_color: PIL color spec (name or hex string).
            font_style: Path to a .ttf font file.
            font_size: Font size in points.
            position: (x, y) anchor of the text; defaults to top-left.
        """
        font = ImageFont.truetype(font_style, font_size)
        draw = ImageDraw.Draw(image)
        draw.text(position, text, fill=text_color, font=font)
        return image

    def add_logo(self, image, logo, position=(0, 0)):
        """Paste `logo` onto `image` (in place) at `position` and return it.

        Both images are converted to RGBA so the logo's alpha channel is
        respected. If the logo would extend past the image edge, a warning
        popup is shown but the paste still happens (PIL crops the overflow).
        """
        if logo.mode != "RGBA":
            logo = logo.convert("RGBA")
        if image.mode != "RGBA":
            image = image.convert("RGBA")
        if (position[0] + logo.width > image.width) or (
            position[1] + logo.height > image.height
        ):
            CTkMessagebox(title="Logo position", message="Logo position out of bounds.")
        # Using the logo itself as mask keeps transparent pixels transparent.
        image.paste(logo, position, mask=logo)
        return image

    def save_image(self, image):
        """Prompt for a destination and save `image`; do nothing on cancel."""
        save_path = filedialog.asksaveasfilename(
            defaultextension="*.png",
            title="Save as",
            filetypes=[
                ("PNG files", "*.png"),
                ("All files", "*.*"),
            ],
        )
        if save_path:
            try:
                image.save(save_path)
            except Exception as e:
                # FIX: bind the exception and format it — the original printed
                # the literal text "{e}" because the f-prefix was missing and
                # `e` was never bound.
                print(f"Failed to save image: {e}")
| {
"repo_id": "geekcomputers/Python",
"file_path": "Image-watermarker/watermark.py",
"license": "MIT License",
"lines": 38,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:Pc_information.py | import platform # built in lib
# Report basic details about this machine using only the stdlib `platform` module.
_details = (
    ("System", platform.system()),                    # type of Operating System
    ("System name", platform.node()),                 # network/host name
    ("version", platform.release()),                  # OS release
    ("detailed version number", platform.version()),  # full build/version string
    ("System architecture", platform.machine()),      # 32-bit vs 64-bit machine type
    ("System processor", platform.processor()),       # CPU model
)
for _label, _value in _details:
    print(f"{_label} : {_value}")
| {
"repo_id": "geekcomputers/Python",
"file_path": "Pc_information.py",
"license": "MIT License",
"lines": 12,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:Todo_GUi.py | from tkinter import messagebox
import tkinter as tk
# Function to be called when button is clicked
def add_Button():
    """Append the entry-field text to the list box, then clear the field.

    Does nothing when the field is empty.
    """
    task = Input.get()
    if not task:
        return
    List.insert(tk.END, task)
    Input.delete(0, tk.END)
def del_Button():
    """Delete the currently selected task, warning if nothing is selected."""
    selection = List.curselection()
    if not selection:
        # Nothing highlighted in the list box.
        messagebox.showwarning("Selection Error", "Please select a task to delete.")
        return
    List.delete(selection[0])
# Create the main window (fixed 500x500, not resizable).
window = tk.Tk()
window.title("Task Manager")
window.geometry("500x500")
window.resizable(False, False)
window.config(bg="light grey")
# Text field where new tasks are typed; focused so typing works immediately.
Input = tk.Entry(window, width=50)
Input.grid(row=0, column=0, padx=20, pady=60)
Input.focus()
# Create the buttons wired to the handlers above.
add = tk.Button(window, text="ADD TASK", height=2, width=9, command=add_Button)
add.grid(row=0, column=1, padx=20, pady=0)
delete = tk.Button(window, text="DELETE TASK", height=2, width=10, command=del_Button)
delete.grid(row=1, column=1)
# List box holding the tasks themselves.
List = tk.Listbox(window, width=50, height=20)
List.grid(row=1, column=0)
window.mainloop()
| {
"repo_id": "geekcomputers/Python",
"file_path": "Todo_GUi.py",
"license": "MIT License",
"lines": 33,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:basic_cal.py | while True:
    try:
        # WARNING: eval() executes whatever expression the user types — fine
        # for a local toy calculator, unsafe on untrusted input.
        print(eval(input("enter digits with operator (e.g. 5+5)\n")))
    except:
        # NOTE(review): a bare `except` also swallows KeyboardInterrupt, so
        # Ctrl+C cannot stop this loop — consider `except Exception:`.
        print("Invalid Input, try again..")
# Simple Calculator using eval() in Python
# This calculator takes user input like "5+5" or "10/2" and shows the result.
| {
"repo_id": "geekcomputers/Python",
"file_path": "basic_cal.py",
"license": "MIT License",
"lines": 7,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:large_files_reading.py | with open(
    "new_project.txt", "r", encoding="utf-8"
) as file:  # replace "new_project.txt" with your actual file name or an absolute path
    # encoding="utf-8" handles files containing non-ASCII characters.
    # Iterating the file object streams one line at a time, so even very
    # large files are processed without loading them fully into memory.
    for f in file:
        print(f.strip())  # strip() removes the trailing newline before printing
| {
"repo_id": "geekcomputers/Python",
"file_path": "large_files_reading.py",
"license": "MIT License",
"lines": 6,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:loops.py | # 2 loops
# for loop:
"""
Syntax..
-> "range": starts at 0.
-> The indentation after the colon is how Python identifies a block of code;
   it is conventionally 4 spaces (or 1 tab).
for <variable> in range(<enter the range>):
    statements you want to execute
for <variable> in <list name>:
    print(<variable>)
To print the list / or any iterator's items
"""
# 1. for with range...
for i in range(3):
    print("Hello... with range")
# prints Hello 3 times..
# 2. for with list
l1 = [1, 2, 3, 78, 98, 56, 52]
for i in l1:
    print("list items", i)
# prints list items one by one....
# 3. strings are iterable too — this prints one character per line
for i in "ABC":
    print(i)
# while loop: repeats until the condition becomes false
i = 0
while i <= 5:
    print("hello.. with while")
    i += 1
| {
"repo_id": "geekcomputers/Python",
"file_path": "loops.py",
"license": "MIT License",
"lines": 29,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
geekcomputers/Python:multiple_comditions.py | while True:
    try:
        user = int(input("enter any number b/w 1-3\n"))
        # elif chain: only the first matching branch runs (see note below).
        if user == 1:
            print("in first if")
        elif user == 2:
            print("in second if")
        elif user == 3:
            print("in third if")
        else:
            print("Enter numbers b/w the range of 1-3")
    except:
        # NOTE(review): bare `except` also catches KeyboardInterrupt, so
        # Ctrl+C cannot stop this loop — consider `except ValueError:`.
        print("enter only digits")
"""
## Why we are using elif instead of nested if ?
When you have multiple conditions to check, using nested if means that if the first condition is true, the program still checks the second
if condition, even though it's already decided that the first condition worked. This makes the program do more work than necessary.
On the other hand, when you use elif, if one condition is satisfied, the program exits the rest of the conditions and doesn't continue checking.
It’s more efficient and clean, as it immediately moves to the correct option without unnecessary steps.
"""
| {
"repo_id": "geekcomputers/Python",
"file_path": "multiple_comditions.py",
"license": "MIT License",
"lines": 20,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
geekcomputers/Python:reading_csv.py | import pandas as pd
# reading csv file into python
df = pd.read_csv(
    r"c:\PROJECT\Drug_Recommendation_System\drug_recommendation_system\Drugs_Review_Datasets.csv"
)  # Replace the path with your own file path
print(df)
# Basic functions
print(df.info())  # Provides a short summary of the DataFrame
print(df.head())  # prints first 5 rows
print(df.tail())  # prints last 5 rows
print(df.describe())  # statistical summary of numeric columns
print(df.columns)  # Returns column names
print(df.shape)  # Returns the number of rows and columns
print(
    help(pd)
)  # Use help(pd) to explore the available functions and attributes of the pandas (pd) library
| {
"repo_id": "geekcomputers/Python",
"file_path": "reading_csv.py",
"license": "MIT License",
"lines": 16,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:saving_input_into_list.py | ran = int(input("Enter the range of elements you want to store / insert "))
# Collect exactly `ran` values from the user, then show them all at once.
l1 = [input("Enter here ") for _ in range(ran)]
print(l1)
"""
program first asks the user how many values they want to enter. Then, using a loop, it lets the user enter that many values one by one.
Each entered value is saved into a list called l1. Once all the values are entered, the program prints the complete list, showing
everything the user typed. It's a beginner-friendly way to learn how to collect multiple inputs and store them for later use.
"""
| {
"repo_id": "geekcomputers/Python",
"file_path": "saving_input_into_list.py",
"license": "MIT License",
"lines": 10,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | documentation |
geekcomputers/Python:scientific_cal.py | import math
# Menu-driven calculator: loops forever until the user picks an unknown
# scientific option (the final `else: break`).
while True:
    print("""
        Press 1 for basic calculator
        Press 2 for scientifc calculator""")
    try:
        cho = int(input("enter your choice here.. "))
        if cho == 1:
            # WARNING: eval() executes arbitrary expressions — toy use only.
            print(eval(input("enter the numbers with operator ")))
        elif cho == 2:
            user = int(
                input("""
                Press 1 for pi calculation
                press 2 for sin calculation
                press 3 for exponent calculation
                press 4 for tangent calculation
                press 5 for square root calculation
                press 6 round calculation
                press 7 for absoulte value
                press any other number to exit the loop. """)
            )
            a = float(input("enter your value here.. "))
            if user == 1:
                print(f"entered value : {a} result :{math.pi * (a)}")
            elif user == 2:
                # Trig functions take radians, so convert from degrees first.
                print(f"entered value : {a} result :{math.sin(math.radians(a))}")
            elif user == 3:
                power = float(input("enter the power"))
                print(f"entered value : {a} result :{a**power}")
            elif user == 4:
                angle_in_radians = math.radians(a)
                result = math.tan(angle_in_radians)
                print(f"entered value : {a} result :{result}")
            elif user == 5:
                print(f"entered value : {a} result :{math.sqrt(a)}")
            elif user == 6:
                print(f"entered value : {a} result :{round(a)}")
            elif user == 7:
                print(f"entered value : {a} result :{abs(a)}")
            else:
                # Any other number exits the program.
                break
    except ZeroDivisionError:
        print("value cannot be divided by 0")
    except Exception:
        # FIX: was a bare `except:`, which also swallowed KeyboardInterrupt
        # and SystemExit so Ctrl+C could never stop the loop. `Exception`
        # still catches bad input (ValueError) and math domain errors.
        print("Enter only digits ")
| {
"repo_id": "geekcomputers/Python",
"file_path": "scientific_cal.py",
"license": "MIT License",
"lines": 45,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
geekcomputers/Python:string_palin.py | #
# With slicing -> Reverses the string using string[::-1]
# Check whether a word reads the same forwards and backwards.
string = input("enter a word to check.. ")
# With slicing: string[::-1] walks the string backwards in one step.
copy = string[::-1]
if string == copy:
    print("Palindrome")  # FIX: was misspelled "Plaindrome"
else:
    print("!")
# Without slicing: rebuild the string manually by prepending each character.
reverse_string = ""
for i in string:
    reverse_string = i + reverse_string
if string == reverse_string:
    print(reverse_string)
else:
    print("!")
| {
"repo_id": "geekcomputers/Python",
"file_path": "string_palin.py",
"license": "MIT License",
"lines": 16,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
geekcomputers/Python:bank_managment_system/QTFrontend.py | from PyQt5 import QtCore, QtGui, QtWidgets
import sys
import backend
# Open (or create) the SQLite database before any UI is built.
backend.connect_database()
# Row fetched for the staff member currently being edited (set by the
# update-employee search page, read by later pages).
employee_data = None
# Page Constants (indices into the QStackedWidget; for reference)
HOME_PAGE = 0
ADMIN_PAGE = 1
EMPLOYEE_PAGE = 2
ADMIN_MENU_PAGE = 3
ADD_EMPLOYEE_PAGE = 4
UPDATE_EMPLOYEE_PAGE1 = 5
UPDATE_EMPLOYEE_PAGE2 = 6
EMPLOYEE_LIST_PAGE = 7
ADMIN_TOTAL_MONEY = 8
EMPLOYEE_MENU_PAGE = 9
EMPLOYEE_CREATE_ACCOUNT_PAGE = 10
EMPLOYEE_SHOW_DETAILS_PAGE1 = 11
EMPLOYEE_SHOW_DETAILS_PAGE2 = 12
EMPLOYEE_ADD_BALANCE_SEARCH = 13
EMPLOYEE_ADD_BALANCE_PAGE = 14
EMPLOYEE_WITHDRAW_MONEY_SEARCH = 15
EMPLOYEE_WITHDRAW_MONEY_PAGE = 16
EMPLOYEE_CHECK_BALANCE_SEARCH = 17
EMPLOYEE_CHECK_BALANCE_PAGE = 18
EMPLOYEE_UPDATE_ACCOUNT_SEARCH = 19
EMPLOYEE_UPDATE_ACCOUNT_PAGE = 20
# Shared font for line edits across the forms.
FONT_SIZE = QtGui.QFont("Segoe UI", 12)
# -------------------------------------------------------------------------------------------------------------
# === Reusable UI Component Functions ===
# -------------------------------------------------------------------------------------------------------------
def create_styled_frame(parent, min_size=None, style=""):
    """Return a raised, styled-panel QFrame, optionally size-constrained.

    Args:
        parent: Owning widget.
        min_size: Optional (width, height) minimum size.
        style: Qt stylesheet string applied to the frame.
    """
    panel = QtWidgets.QFrame(parent)
    panel.setFrameShape(QtWidgets.QFrame.StyledPanel)
    panel.setFrameShadow(QtWidgets.QFrame.Raised)
    if min_size:
        width, height = min_size
        panel.setMinimumSize(QtCore.QSize(width, height))
    panel.setStyleSheet(style)
    return panel
def create_styled_label(
    parent, text, font_size=12, bold=False, style="color: #2c3e50; padding: 10px;"
):
    """Return a QLabel in Segoe UI at `font_size`, optionally bold, with `style`."""
    label = QtWidgets.QLabel(parent)
    label.setText(text)
    label_font = QtGui.QFont("Segoe UI", font_size)
    if bold:
        label_font.setBold(True)
        label_font.setWeight(75)
    label.setFont(label_font)
    label.setStyleSheet(style)
    return label
def create_styled_button(parent, text, min_size=None):
    """Create a styled QPushButton with hover and pressed effects.

    Args:
        parent: Owning widget.
        text: Button caption.
        min_size: Optional (width, height) minimum size.
    """
    button = QtWidgets.QPushButton(parent)
    if min_size:
        button.setMinimumSize(QtCore.QSize(*min_size))
    # Blue primary-action styling shared across the app.
    button.setStyleSheet("""
        QPushButton {
            background-color: #3498db;
            color: white;
            font-family: 'Segoe UI';
            font-size: 16px;
            font-weight: bold;
            border-radius: 8px;
            padding: 12px;
            border: none;
        }
        QPushButton:hover {
            background-color: #2980b9;
        }
        QPushButton:pressed {
            background-color: #1c6ea4;
        }
    """)
    button.setText(text)
    return button
def create_input_field(parent, label_text, min_label_size=(120, 0)):
    """Create a horizontal layout with a label and a QLineEdit.

    Returns:
        (frame, line_edit): the container frame and the editable field.
    """
    frame = create_styled_frame(parent, style="padding: 7px;")
    layout = QtWidgets.QHBoxLayout(frame)
    layout.setContentsMargins(0, 0, 0, 0)
    layout.setSpacing(0)
    label = create_styled_label(
        frame, label_text, font_size=12, bold=True, style="color: #2c3e50;"
    )
    if min_label_size:
        # Fixed label width keeps fields on the same form aligned.
        label.setMinimumSize(QtCore.QSize(*min_label_size))
    line_edit = QtWidgets.QLineEdit(frame)
    line_edit.setFont(FONT_SIZE)
    line_edit.setStyleSheet(
        "background-color: #f0f0f0; border: 1px solid #ccc; border-radius: 4px; padding: 8px;"
    )
    layout.addWidget(label)
    layout.addWidget(line_edit)
    return frame, line_edit
def create_input_field_V(parent, label_text, min_label_size=(120, 0)):
    """Create a *vertical* layout with a label above a QLineEdit.

    Same contract as create_input_field but stacked top-to-bottom.

    Returns:
        (frame, line_edit): the container frame and the editable field.
    """
    frame = create_styled_frame(parent, style="padding: 7px;")
    layout = QtWidgets.QVBoxLayout(frame)
    layout.setContentsMargins(0, 0, 0, 0)
    layout.setSpacing(0)
    label = create_styled_label(
        frame, label_text, font_size=12, bold=True, style="color: #2c3e50;"
    )
    if min_label_size:
        label.setMinimumSize(QtCore.QSize(*min_label_size))
    line_edit = QtWidgets.QLineEdit(frame)
    line_edit.setStyleSheet(
        "background-color: #f0f0f0; border: 1px solid #ccc; border-radius: 4px; padding: 8px;"
    )
    line_edit.setFont(FONT_SIZE)
    layout.addWidget(label)
    layout.addWidget(line_edit)
    return frame, line_edit
def show_popup_message(
    parent,
    message: str,
    page: int = None,
    show_cancel: bool = False,
    cancel_page: int = HOME_PAGE,
):
    """Reusable modal popup message box.

    Args:
        parent: The QStackedWidget owning the pages (also dialog parent).
        message (str): The message to display.
        page (int, optional): Page index to switch to when OK is pressed.
        show_cancel (bool): Whether to show the Cancel button.
        cancel_page (int): Page index to switch to when Cancel is pressed
            (only used when `page` is not None).
    """
    dialog = QtWidgets.QDialog(parent)
    dialog.setWindowTitle("Message")
    dialog.setFixedSize(350, 100)
    dialog.setStyleSheet("background-color: #f0f0f0;")
    layout = QtWidgets.QVBoxLayout(dialog)
    layout.setSpacing(10)
    layout.setContentsMargins(15, 15, 15, 15)
    label = QtWidgets.QLabel(message)
    label.setStyleSheet("font-size: 12px; color: #2c3e50;")
    label.setWordWrap(True)
    layout.addWidget(label)
    # Decide which buttons to show
    if show_cancel:
        button_box = QtWidgets.QDialogButtonBox(
            QtWidgets.QDialogButtonBox.Ok | QtWidgets.QDialogButtonBox.Cancel
        )
    else:
        button_box = QtWidgets.QDialogButtonBox(QtWidgets.QDialogButtonBox.Ok)
    button_box.setStyleSheet("""
        QPushButton {
            background-color: #3498db;
            color: white;
            border-radius: 4px;
            padding: 6px 12px;
            min-width: 80px;
        }
        QPushButton:hover {
            background-color: #2980b9;
        }
        QPushButton:pressed {
            background-color: #1c6ea4;
        }
    """)
    layout.addWidget(button_box)

    # Connect buttons: each closes the dialog and optionally navigates.
    def on_accept():
        if page is not None:
            parent.setCurrentIndex(page)
        dialog.accept()

    def on_reject():
        if page is not None:
            parent.setCurrentIndex(cancel_page)
        dialog.reject()

    button_box.accepted.connect(on_accept)
    button_box.rejected.connect(on_reject)
    # Modal: blocks until the user dismisses the dialog.
    dialog.exec_()
def search_result(parent, title, label_text):
    """Build a generic one-field search page (headline + input + Submit).

    Args:
        parent: The stacked widget the page belongs to.
        title: Headline text for the page.
        label_text: Caption shown next to the input field.

    Returns:
        (page, (line_edit, submit_button)): the page widget plus the field
        and button the caller wires up.
    """
    page, main_layout = create_page_with_header(parent, title)
    content_frame = create_styled_frame(page)
    content_frame.setSizePolicy(
        QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Expanding
    )
    content_layout = QtWidgets.QVBoxLayout(content_frame)
    # FIX: removed the stray no-op statement `content_layout.alignment`
    # (a bare attribute access that did nothing; it was likely intended to
    # be a setAlignment call, but the layout is centered below anyway).
    form_frame = create_styled_frame(
        content_frame,
        min_size=(400, 200),
        style="background-color: #ffffff; border-radius: 15px; padding: 10px;",
    )
    form_layout = QtWidgets.QVBoxLayout(form_frame)
    form_layout.setSpacing(3)
    # Define input fields
    user = create_input_field(form_frame, label_text, min_label_size=(180, 0))
    form_layout.addWidget(user[0])
    user_account_number = user[1]
    user_account_number.setFont(FONT_SIZE)
    submit_button = create_styled_button(form_frame, "Submit", min_size=(100, 50))
    form_layout.addWidget(submit_button)
    content_layout.addWidget(
        form_frame, 0, QtCore.Qt.AlignHCenter | QtCore.Qt.AlignVCenter
    )
    main_layout.addWidget(content_frame)
    return page, (user_account_number, submit_button)
# -------------------------------------------------------------------------------------------------------------
# === Page Creation Functions ==
# -------------------------------------------------------------------------------------------------------------
def create_page_with_header(parent, title_text):
    """Create a page topped by a styled header; return (page, main layout)."""
    page = QtWidgets.QWidget(parent)
    layout = QtWidgets.QVBoxLayout(page)
    layout.setContentsMargins(20, 20, 20, 20)
    layout.setSpacing(20)
    header = create_styled_frame(
        page, style="background-color: #ffffff; border-radius: 10px; padding: 10px;"
    )
    header_box = QtWidgets.QVBoxLayout(header)
    heading = create_styled_label(header, title_text, font_size=30)
    header_box.addWidget(heading, 0, QtCore.Qt.AlignHCenter | QtCore.Qt.AlignTop)
    layout.addWidget(header, 0, QtCore.Qt.AlignTop)
    return page, layout
def get_employee_name(parent, name_field_text="Enter Employee Name"):
    """Build the employee-search page used by the update-employee flow.

    On a successful search the matching staff row is stored in the module
    global `employee_data` and the stacked widget navigates to
    UPDATE_EMPLOYEE_PAGE2.

    Returns:
        The constructed page widget.
    """
    page, main_layout = create_page_with_header(parent, "Employee Data Update")
    # Content frame
    content_frame = create_styled_frame(page)
    content_frame.setSizePolicy(
        QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Expanding
    )
    content_layout = QtWidgets.QVBoxLayout(content_frame)
    # Form frame
    form_frame = create_styled_frame(
        content_frame,
        min_size=(340, 200),
        style="background-color: #ffffff; border-radius: 15px; padding: 10px;",
    )
    form_layout = QtWidgets.QVBoxLayout(form_frame)
    # Form fields
    name_label, name_field = create_input_field(form_frame, name_field_text)
    search_button = create_styled_button(form_frame, "Search", min_size=(100, 30))
    form_layout.addWidget(name_label)
    form_layout.addWidget(search_button)
    content_layout.addWidget(
        form_frame, 0, QtCore.Qt.AlignHCenter | QtCore.Qt.AlignVCenter
    )
    main_layout.addWidget(content_frame)

    def on_search_button_clicked():
        # Look the name up in the staff table; stash the row for page 2.
        global employee_data
        entered_name = name_field.text().strip()
        print(f"Entered Name: {entered_name}")
        if not entered_name:
            QtWidgets.QMessageBox.warning(
                parent, "Input Error", "Please enter an employee name."
            )
            return
        try:
            employee_check = backend.check_name_in_staff(entered_name)
            print(f"Employee Check: {type(employee_check)},{employee_check}")
            if employee_check:
                # Fetch the full row via the backend's shared cursor.
                cur = backend.cur
                cur.execute("SELECT * FROM staff WHERE name = ?", (entered_name,))
                employee_data = cur.fetchone()
                print(f"Employee Data: {employee_data}")
                parent.setCurrentIndex(UPDATE_EMPLOYEE_PAGE2)
            else:
                QtWidgets.QMessageBox.information(
                    parent, "Not Found", "Employee not found."
                )
        except Exception as e:
            QtWidgets.QMessageBox.critical(
                parent, "Error", f"An error occurred: {str(e)}"
            )

    search_button.clicked.connect(on_search_button_clicked)
    return page
# backend.check_name_in_staff()
def create_login_page(
    parent,
    title,
    name_field_text="Name :",
    password_field_text="Password :",
    submit_text="Submit",
):
    """Create a login page with a title, name and password fields, and a submit button.

    Returns:
        (page, name_edit, password_edit, submit_button) — the caller wires
        the button's clicked signal to its own handler.
    """
    page, main_layout = create_page_with_header(parent, title)
    # Content frame
    content_frame = create_styled_frame(page)
    content_frame.setSizePolicy(
        QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Expanding
    )
    content_layout = QtWidgets.QVBoxLayout(content_frame)
    # Form frame (white card centered on the page)
    form_frame = create_styled_frame(
        content_frame,
        min_size=(340, 200),
        style="background-color: #ffffff; border-radius: 15px; padding: 10px;",
    )
    form_layout = QtWidgets.QVBoxLayout(form_frame)
    form_layout.setSpacing(20)
    # Input fields
    name_frame, name_edit = create_input_field(form_frame, name_field_text)
    password_frame, password_edit = create_input_field(form_frame, password_field_text)
    # Submit button
    button_frame = create_styled_frame(form_frame, style="padding: 7px;")
    button_layout = QtWidgets.QVBoxLayout(button_frame)
    button_layout.setSpacing(60)
    submit_button = create_styled_button(button_frame, submit_text, min_size=(150, 0))
    button_layout.addWidget(submit_button, 0, QtCore.Qt.AlignHCenter)
    form_layout.addWidget(name_frame)
    form_layout.addWidget(password_frame)
    form_layout.addWidget(button_frame)
    content_layout.addWidget(
        form_frame, 0, QtCore.Qt.AlignHCenter | QtCore.Qt.AlignVCenter
    )
    main_layout.addWidget(content_frame)
    return page, name_edit, password_edit, submit_button
def on_login_button_clicked(parent, name_field, password_field):
    """Validate the login form and authenticate via the backend.

    Shows an info popup on success, a warning on bad credentials, and a
    critical dialog if the backend raises.
    """
    name = name_field.text().strip()
    password = password_field.text().strip()

    # Both fields are required before hitting the backend.
    if not (name and password):
        show_popup_message(parent, "Please enter your name and password.", HOME_PAGE)
        return

    try:
        # Backend credential check for admin login.
        if backend.check_admin(name, password):
            QtWidgets.QMessageBox.information(
                parent, "Login Successful", f"Welcome, {name}!"
            )
        else:
            QtWidgets.QMessageBox.warning(
                parent, "Login Failed", "Incorrect name or password."
            )
    except Exception as e:
        QtWidgets.QMessageBox.critical(
            parent, "Error", f"An error occurred during login: {str(e)}"
        )
def create_home_page(parent, on_admin_clicked, on_employee_clicked, on_exit_clicked):
    """Create the home page with Admin, Employee, and Exit buttons."""
    page, main_layout = create_page_with_header(parent, "Admin Menu")

    # Expanding outer frame so the button card stays centered.
    outer_frame = create_styled_frame(page)
    outer_frame.setSizePolicy(
        QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Expanding
    )
    outer_layout = QtWidgets.QVBoxLayout(outer_frame)

    # Rounded white card holding the three navigation buttons.
    card = create_styled_frame(
        outer_frame,
        min_size=(300, 0),
        style="background-color: #ffffff; border-radius: 15px; padding: 20px;",
    )
    card_layout = QtWidgets.QVBoxLayout(card)
    card_layout.setSpacing(15)

    admin_button = create_styled_button(card, "Admin")
    employee_button = create_styled_button(card, "Employee")
    exit_button = create_styled_button(card, "Exit")
    # Exit gets a red "danger" look to distinguish it from the other actions.
    exit_button.setStyleSheet("""
        QPushButton {
            background-color: #e74c3c;
            color: white;
            font-family: 'Segoe UI';
            font-size: 16px;
            font-weight: bold;
            border-radius: 8px;
            padding: 12px;
            border: none;
        }
        QPushButton:hover {
            background-color: #c0392b;
        }
        QPushButton:pressed {
            background-color: #992d22;
        }
    """)

    for button in (admin_button, employee_button, exit_button):
        card_layout.addWidget(button)
    outer_layout.addWidget(
        card, 0, QtCore.Qt.AlignHCenter | QtCore.Qt.AlignVCenter
    )
    main_layout.addWidget(outer_frame)

    # Wire navigation callbacks supplied by the caller.
    admin_button.clicked.connect(on_admin_clicked)
    employee_button.clicked.connect(on_employee_clicked)
    exit_button.clicked.connect(on_exit_clicked)
    return page
def create_admin_menu_page(parent):
    """Build the admin menu with one styled button per admin action.

    Returns:
        tuple: (page, add_btn, update_btn, list_btn, money_btn, back_btn)
    """
    page, main_layout = create_page_with_header(parent, "Admin Menu")

    outer = create_styled_frame(page)
    outer.setSizePolicy(
        QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Expanding
    )
    outer_layout = QtWidgets.QVBoxLayout(outer)

    card = create_styled_frame(
        outer,
        min_size=(300, 0),
        style="background-color: #ffffff; border-radius: 15px; padding: 20px;",
    )
    card_layout = QtWidgets.QVBoxLayout(card)
    card_layout.setSpacing(15)

    # One button per admin action, in display order.
    labels = (
        "Add Employee",
        "Update Employee",
        "Employee List",
        "Total Money",
        "Back",
    )
    buttons = []
    for text in labels:
        button = create_styled_button(card, text)
        card_layout.addWidget(button)
        buttons.append(button)

    outer_layout.addWidget(
        card, 0, QtCore.Qt.AlignHCenter | QtCore.Qt.AlignVCenter
    )
    main_layout.addWidget(outer)
    return page, *buttons  # add, update, list, money, back
def create_add_employee_page(
    parent, title, submit_text="Submit", update_btn: bool = False
):
    """Build the add/update employee form page.

    Args:
        parent: The stacked widget this page belongs to.
        title: Header title for the page.
        submit_text: Label for the action button (ignored when ``update_btn``).
        update_btn: When True, the action button is labeled "Update".

    Returns:
        tuple: (page, name_edit, password_edit, salary_edit, position_edit,
        action_button) — same shape for both modes.
    """
    page, main_layout = create_page_with_header(parent, title)

    content_frame = create_styled_frame(page)
    content_frame.setSizePolicy(
        QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Expanding
    )
    content_layout = QtWidgets.QVBoxLayout(content_frame)

    form_frame = create_styled_frame(
        content_frame,
        min_size=(340, 200),
        style="background-color: #ffffff; border-radius: 15px; padding: 10px;",
    )
    form_layout = QtWidgets.QVBoxLayout(form_frame)
    form_layout.setSpacing(10)

    # Build one labeled input row per field; collect the edits in field order
    # and unpack, replacing the former None-init + if/elif assignment chain.
    edits = []
    for label in ("Name :", "Password :", "Salary :", "Position :"):
        field_frame, field_edit = create_input_field(form_frame, label)
        form_layout.addWidget(field_frame)
        edits.append(field_edit)
    name_edit, password_edit, salary_edit, position_edit = edits

    # Single action button; label depends on add vs update mode.
    button_frame = create_styled_frame(form_frame, style="padding: 7px;")
    button_layout = QtWidgets.QVBoxLayout(button_frame)
    action_button = create_styled_button(
        button_frame, "Update" if update_btn else submit_text, min_size=(100, 50)
    )
    button_layout.addWidget(action_button, 0, QtCore.Qt.AlignHCenter)
    form_layout.addWidget(button_frame)

    content_layout.addWidget(
        form_frame, 0, QtCore.Qt.AlignHCenter | QtCore.Qt.AlignVCenter
    )
    main_layout.addWidget(content_frame)

    # Back button returns to the admin menu page.
    back_btn = QtWidgets.QPushButton("Back", content_frame)
    back_btn.setStyleSheet("""
        QPushButton {
            background-color: #6c757d;
            color: white;
            border: none;
            border-radius: 4px;
            padding: 8px 16px;
            font-size: 14px;
        }
        QPushButton:hover {
            background-color: #5a6268;
        }
    """)
    back_btn.clicked.connect(lambda: parent.setCurrentIndex(ADMIN_MENU_PAGE))
    main_layout.addWidget(back_btn, 0, alignment=QtCore.Qt.AlignLeft)

    return page, name_edit, password_edit, salary_edit, position_edit, action_button
def show_employee_list_page(parent, title):
    """Render a read-only employee table (Name / Position / Salary).

    Rows come from ``backend.show_employees_for_update()``. The "table" is
    built from styled frames rather than a QTableWidget, so column widths
    are controlled by layout stretch factors. Returns the page widget.
    """
    page, main_layout = create_page_with_header(parent, title)
    content_frame = create_styled_frame(
        page, style="background-color: #f9f9f9; border-radius: 10px; padding: 15px;"
    )
    content_frame.setSizePolicy(
        QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Expanding
    )
    content_layout = QtWidgets.QVBoxLayout(content_frame)
    # Table frame: white card that holds the header row and the data rows.
    table_frame = create_styled_frame(
        content_frame,
        style="background-color: #ffffff; border-radius: 8px; padding: 10px;",
    )
    table_layout = QtWidgets.QVBoxLayout(table_frame)
    table_layout.setSpacing(0)
    # Header row (the doubled ";" in the style string is harmless to Qt).
    header_frame = create_styled_frame(
        table_frame,
        style="background-color: #f5f5f5; ; border-radius: 8px 8px 0 0; padding: 10px;",
    )
    header_layout = QtWidgets.QHBoxLayout(header_frame)
    header_layout.setContentsMargins(10, 5, 10, 5)
    headers = ["Name", "Position", "Salary"]
    for i, header in enumerate(headers):
        header_label = QtWidgets.QLabel(header, header_frame)
        header_label.setStyleSheet(
            "font-weight: bold; font-size: 14px; color: #333333; padding: 0px; margin: 0px;"
        )
        if i == 2:  # Right-align salary header
            header_label.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignVCenter)
        else:
            header_label.setAlignment(QtCore.Qt.AlignLeft | QtCore.Qt.AlignVCenter)
        header_layout.addWidget(
            header_label, 1 if i < 2 else 0
        )  # Stretch name and position, not salary
    table_layout.addWidget(header_frame)
    # Employee rows.
    # NOTE(review): indices used below are employee[0]=name, employee[3]=position,
    # employee[2]=salary — this assumes the backend's staff row layout; confirm
    # against the schema if lookups ever look shifted.
    employees = backend.show_employees_for_update()
    for row, employee in enumerate(employees):
        # Zebra striping: alternate background per row for readability.
        row_frame = create_styled_frame(
            table_frame,
            style=f"background-color: {'#fafafa' if row % 2 else '#ffffff'}; padding: 8px;",
        )
        row_layout = QtWidgets.QHBoxLayout(row_frame)
        row_layout.setContentsMargins(10, 5, 10, 5)
        # Name
        name_label = QtWidgets.QLabel(employee[0], row_frame)
        name_label.setStyleSheet(
            "font-size: 14px; color: #333333; padding: 0px; margin: 0px;"
        )
        name_label.setAlignment(QtCore.Qt.AlignLeft | QtCore.Qt.AlignVCenter)
        row_layout.addWidget(name_label, 1)
        # Position
        position_label = QtWidgets.QLabel(employee[3], row_frame)
        position_label.setStyleSheet(
            "font-size: 14px; color: #333333; padding: 0px; margin: 0px;"
        )
        position_label.setAlignment(QtCore.Qt.AlignLeft | QtCore.Qt.AlignVCenter)
        row_layout.addWidget(position_label, 1)
        # Salary (formatted as currency)
        salary_label = QtWidgets.QLabel(f"${float(employee[2]):,.2f}", row_frame)
        salary_label.setStyleSheet(
            "font-size: 14px; color: #333333; padding: 0px; margin: 0px;"
        )
        salary_label.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignVCenter)
        row_layout.addWidget(salary_label, 0)
        table_layout.addWidget(row_frame)
    # Add stretch to prevent rows from expanding vertically
    table_layout.addStretch()
    # Back button: returns to the admin menu.
    back_button = QtWidgets.QPushButton("Back", content_frame)
    back_button.setStyleSheet("""
        QPushButton {
            background-color: #6c757d;
            color: white;
            border: none;
            border-radius: 4px;
            padding: 8px 16px;
            font-size: 14px;
        }
        QPushButton:hover {
            background-color: #5a6268;
        }
    """)
    back_button.clicked.connect(lambda: parent.setCurrentIndex(ADMIN_MENU_PAGE))
    content_layout.addWidget(table_frame)
    main_layout.addWidget(back_button, alignment=QtCore.Qt.AlignLeft)
    main_layout.addWidget(content_frame)
    return page
def show_total_money(parent, title):
    """Show the combined balance of all accounts, with a back button."""
    page, main_layout = create_page_with_header(parent, title)

    content_frame = create_styled_frame(
        page, style="background-color: #f9f9f9; border-radius: 10px; padding: 15px;"
    )
    content_layout = QtWidgets.QVBoxLayout(content_frame)
    content_layout.setProperty("spacing", 10)

    # Grand total across every customer account, computed by the backend.
    total = backend.all_money()
    total_label = QtWidgets.QLabel(f"Total Money: ${total}", content_frame)
    total_label.setStyleSheet(
        "font-size: 24px; font-weight: bold; color: #333333;"
    )
    content_layout.addWidget(total_label, alignment=QtCore.Qt.AlignCenter)

    # Back button returns to the admin menu.
    back_button = QtWidgets.QPushButton("Back", content_frame)
    back_button.setStyleSheet("""
        QPushButton {
            background-color: #6c757d;
            color: white;
            border: none;
            border-radius: 4px;
            padding: 8px 16px;
            font-size: 14px;
        }
        QPushButton:hover {
            background-color: #5a6268;
        }
    """)
    back_button.clicked.connect(lambda: parent.setCurrentIndex(ADMIN_MENU_PAGE))
    content_layout.addWidget(back_button, alignment=QtCore.Qt.AlignCenter)
    main_layout.addWidget(content_frame)
    return page
# ----------- Employee menu pages -----------
def create_employee_menu_page(parent, title):
    """Build the employee menu: one styled button per employee action.

    Returns:
        tuple: (page, *buttons) in the same order as the on-screen labels.
    """
    page, main_layout = create_page_with_header(parent, title)

    outer = create_styled_frame(page)
    outer.setSizePolicy(
        QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Expanding
    )
    outer_layout = QtWidgets.QVBoxLayout(outer)

    card = create_styled_frame(
        outer,
        min_size=(300, 0),
        style="background-color: #ffffff; border-radius: 15px; padding: 20px;",
    )
    card_layout = QtWidgets.QVBoxLayout(card)
    card_layout.setSpacing(15)

    # NOTE(review): label text (including "Chack Balanace" and the trailing
    # space in "Create Account ") is kept verbatim; callers unpack buttons by
    # position, so fixing the display text would be a separate UI change.
    labels = (
        "Create Account ",
        "Show Details",
        "Add Balance",
        "Withdraw Money",
        "Chack Balanace",
        "Update Account",
        "list of all Members",
        "Delete Account",
        "Back",
    )
    buttons = []
    for text in labels:
        button: QtWidgets.QPushButton = create_styled_button(card, text)
        card_layout.addWidget(button)
        buttons.append(button)

    outer_layout.addWidget(
        card, 0, QtCore.Qt.AlignHCenter | QtCore.Qt.AlignVCenter
    )
    main_layout.addWidget(outer)
    return page, *buttons  # one button per label, in order
def create_account_page(parent, title, update_btn=False):
    """Build the create/update customer-account form.

    Args:
        parent: The stacked widget this page belongs to.
        title: Header title.
        update_btn: When True the action button reads "Update" instead of
            "Submit".

    Returns:
        tuple: (page, (name_edit, age_edit, address_edit, balance_edit,
        mobile_edit, account_type_dropdown, submit_button))
    """
    page, main_layout = create_page_with_header(parent, title)

    content_frame = create_styled_frame(page)
    content_frame.setSizePolicy(
        QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Expanding
    )
    content_layout = QtWidgets.QVBoxLayout(content_frame)

    form_frame = create_styled_frame(
        content_frame,
        min_size=(400, 200),
        style="background-color: #ffffff; border-radius: 15px; padding: 10px;",
    )
    form_layout = QtWidgets.QVBoxLayout(form_frame)
    form_layout.setSpacing(3)

    # One labeled input row per customer attribute; collect the edits in
    # order and unpack, replacing the former if/elif assignment chain.
    edits = []
    for label in ("Name :", "Age :", "Address", "Balance :", "Mobile number :"):
        field_frame, field_edit = create_input_field(
            form_frame, label, min_label_size=(160, 0)
        )
        form_layout.addWidget(field_frame)
        field_edit.setFont(QtGui.QFont("Arial", 12))
        edits.append(field_edit)
    name_edit, age_edit, address_edit, balance_edit, mobile_edit = edits

    # Dropdown for account type.
    account_type_label = QtWidgets.QLabel("Account Type :", form_frame)
    account_type_label.setStyleSheet(
        "font-size: 14px; font-weight: bold; color: #333333;"
    )
    form_layout.addWidget(account_type_label)
    account_type_dropdown = QtWidgets.QComboBox(form_frame)
    account_type_dropdown.addItems(["Savings", "Current", "Fixed Deposit"])
    account_type_dropdown.setStyleSheet("""
        QComboBox {
            padding: 5px;
            border: 1px solid #ccc;
            border-radius: 4px;
            background-color: white;
            min-width: 200px;
            font-size: 14px;
        }
        QComboBox:hover {
            border: 1px solid #999;
        }
        QComboBox::drop-down {
            border: none;
            width: 25px;
        }
        QComboBox::down-arrow {
            width: 12px;
            height: 12px;
        }
        QComboBox QAbstractItemView {
            border: 1px solid #ccc;
            background-color: white;
            selection-background-color: #0078d4;
            selection-color: white;
        }
    """)
    form_layout.addWidget(account_type_dropdown)

    # Single action button; label depends on create vs update mode.
    button_frame = create_styled_frame(form_frame, style="padding: 7px;")
    button_layout = QtWidgets.QVBoxLayout(button_frame)
    submit_button = create_styled_button(
        button_frame, "Update" if update_btn else "Submit", min_size=(100, 50)
    )
    button_layout.addWidget(submit_button, 0, QtCore.Qt.AlignHCenter)
    form_layout.addWidget(button_frame)

    content_layout.addWidget(
        form_frame, 0, QtCore.Qt.AlignHCenter | QtCore.Qt.AlignVCenter
    )
    main_layout.addWidget(content_frame)

    # Back button returns to the employee menu.
    back_btn = QtWidgets.QPushButton("Back", content_frame)
    back_btn.setStyleSheet("""
        QPushButton {
            background-color: #6c757d;
            color: white;
            border: none;
            border-radius: 4px;
            padding: 8px 16px;
            font-size: 14px;
        }
        QPushButton:hover {
            background-color: #5a6268;
        }
    """)
    back_btn.clicked.connect(lambda: parent.setCurrentIndex(EMPLOYEE_MENU_PAGE))
    main_layout.addWidget(back_btn, 0, alignment=QtCore.Qt.AlignLeft)

    return page, (
        name_edit,
        age_edit,
        address_edit,
        balance_edit,
        mobile_edit,
        account_type_dropdown,
        submit_button,
    )
def create_show_details_page1(parent, title):
    """Build the account-number prompt page (step 1 of "Show Details").

    Returns:
        tuple: (page, (account_number_edit, submit_button))
    """
    page, main_layout = create_page_with_header(parent, title)

    content_frame = create_styled_frame(page)
    content_frame.setSizePolicy(
        QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Expanding
    )
    content_layout = QtWidgets.QVBoxLayout(content_frame)

    form_frame = create_styled_frame(
        content_frame,
        min_size=(400, 200),
        style="background-color: #ffffff; border-radius: 15px; padding: 10px;",
    )
    form_layout = QtWidgets.QVBoxLayout(form_frame)
    form_layout.setSpacing(3)

    # Single prompt row: label plus line edit for the account number.
    account_row, account_number_edit = create_input_field(
        form_frame, "Enter Bank account Number :", min_label_size=(180, 0)
    )
    form_layout.addWidget(account_row)

    submit_button = create_styled_button(form_frame, "Submit", min_size=(100, 50))
    form_layout.addWidget(submit_button)

    content_layout.addWidget(
        form_frame, 0, QtCore.Qt.AlignHCenter | QtCore.Qt.AlignVCenter
    )
    main_layout.addWidget(content_frame)
    return page, (account_number_edit, submit_button)
def create_show_details_page2(parent, title):
    """Build the read-only account-details view (step 2 of "Show Details").

    Creates seven read-only fields (account no, name, age, address, balance,
    mobile number, account type) that the caller populates after a successful
    lookup, plus an Exit button back to the employee menu.

    Returns:
        tuple: (page, (account_no_field, name_field, age_field, address_field,
        balance_field, mobile_number_field, account_type_field, exite_btn))
    """
    page, main_layout = create_page_with_header(parent, title)
    content_frame = create_styled_frame(page)
    content_frame.setSizePolicy(
        QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Expanding
    )
    content_layout = QtWidgets.QVBoxLayout(content_frame)
    form_frame = create_styled_frame(
        content_frame,
        min_size=(400, 200),
        style="background-color: #ffffff; border-radius: 15px; padding: 10px;",
    )
    form_layout = QtWidgets.QVBoxLayout(form_frame)
    form_frame.setSizePolicy(
        QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Expanding
    )
    form_layout.setSpacing(3)
    # Define input fields; each row is read-only display, not data entry.
    labeles = [
        "Account No: ",
        "Name: ",
        "Age:",
        "Address: ",
        "Balance: ",
        "Mobile Number: ",
        "Account Type: ",
    ]
    # Map each positional row onto a named field for the return tuple.
    for i in range(len(labeles)):
        label_frame, input_field = create_input_field(
            form_frame, labeles[i], min_label_size=(180, 30)
        )
        form_layout.addWidget(label_frame)
        input_field.setReadOnly(True)
        input_field.setFont(QtGui.QFont("Arial", 12))
        if i == 0:
            account_no_field = input_field
        elif i == 1:
            name_field = input_field
        elif i == 2:
            age_field = input_field
        elif i == 3:
            address_field = input_field
        elif i == 4:
            balance_field = input_field
        elif i == 5:
            mobile_number_field = input_field
        elif i == 6:
            account_type_field = input_field
    # Exit button navigates back to the employee menu.
    exite_btn = create_styled_button(form_frame, "Exit", min_size=(100, 50))
    exite_btn.setStyleSheet("""
        QPushButton {
            background-color: #6c757d;
            color: white;
            border: none;
            border-radius: 4px;
            padding: 8px 16px;
            font-size: 14px;
        }
        QPushButton:hover {
            background-color: #5a6268;
        }
    """)
    exite_btn.clicked.connect(lambda: parent.setCurrentIndex(EMPLOYEE_MENU_PAGE))
    content_layout.addWidget(
        form_frame, 0, QtCore.Qt.AlignHCenter | QtCore.Qt.AlignVCenter
    )
    main_layout.addWidget(content_frame)
    main_layout.addWidget(exite_btn)
    return page, (
        account_no_field,
        name_field,
        age_field,
        address_field,
        balance_field,
        mobile_number_field,
        account_type_field,
        exite_btn,
    )
def update_user(parent, title, input_fields_label, input_fielf: bool = True):
    """Build the balance-operation form: read-only user name and balance,
    optionally with one editable amount field.

    Args:
        parent: The stacked widget this page belongs to.
        title: Header title.
        input_fields_label: Label for the optional editable field.
        input_fielf: When True, include the editable amount field.
            (Misspelled name kept for backward compatibility with callers
            that pass it by keyword.)

    Returns:
        tuple: (page, widgets) where widgets is (name_edit, balance_edit,
        amount_edit, submit_button) when ``input_fielf`` is True, otherwise
        (name_edit, balance_edit, submit_button).
    """
    page, main_layout = create_page_with_header(parent, title)

    content_frame = create_styled_frame(page)
    content_frame.setSizePolicy(
        QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Expanding
    )
    # (Removed a no-op bare attribute access `content_layout.alignment` and a
    # no-op bare `backend` expression that were dead statements.)
    content_layout = QtWidgets.QVBoxLayout(content_frame)

    form_frame = create_styled_frame(
        content_frame,
        min_size=(400, 200),
        style="background-color: #ffffff; border-radius: 15px; padding: 10px;",
    )
    form_layout = QtWidgets.QVBoxLayout(form_frame)
    form_layout.setSpacing(3)

    # Read-only rows identifying the account the operation applies to.
    user_row, user_account_name = create_input_field(
        form_frame, "User Name: ", min_label_size=(180, 0)
    )
    balance_row, user_balance_field = create_input_field(
        form_frame, "Balance: ", min_label_size=(180, 0)
    )
    form_layout.addWidget(user_row)
    form_layout.addWidget(balance_row)

    # Optional editable amount field (deposit/withdraw amount etc.).
    user_update_balance_field = None
    if input_fielf:
        amount_row, user_update_balance_field = create_input_field_V(
            form_frame, input_fields_label, min_label_size=(180, 0)
        )
        form_layout.addWidget(amount_row)

    # Grey style signals "informational, not editable".
    readonly_style = (
        "background-color: #8a8a8a; border: 1px solid #ccc;"
        " border-radius: 4px; padding: 8px;"
    )
    for field in (user_account_name, user_balance_field):
        field.setReadOnly(True)
        field.setStyleSheet(readonly_style)
        field.setFont(FONT_SIZE)
    if input_fielf:
        user_update_balance_field.setStyleSheet(
            "background-color: #f0f0f0; border: 1px solid #ccc; border-radius: 4px; padding: 8px;"
        )
        user_update_balance_field.setFont(FONT_SIZE)

    # Submit button for the operation.
    submit_button = create_styled_button(form_frame, "Submit", min_size=(100, 50))
    form_layout.addWidget(submit_button)

    content_layout.addWidget(
        form_frame, 0, QtCore.Qt.AlignHCenter | QtCore.Qt.AlignVCenter
    )
    main_layout.addWidget(content_frame)

    # Back button returns to the employee menu.
    back_btn = create_styled_button(content_frame, "Back", min_size=(100, 50))
    back_btn.setStyleSheet("""
        QPushButton {
            background-color: #6c757d;
            color: white;
            border: none;
            border-radius: 4px;
            padding: 8px 16px;
            font-size: 14px;
        }
        QPushButton:hover {
            background-color: #5a6268;
        }
    """)
    back_btn.clicked.connect(lambda: parent.setCurrentIndex(EMPLOYEE_MENU_PAGE))

    if input_fielf:
        return page, (
            user_account_name,
            user_balance_field,
            user_update_balance_field,
            submit_button,
        )
    return page, (user_account_name, user_balance_field, submit_button)
# -------------------------------------------------------------------------------------------------------------
# === Main Window Setup ===
# -------------------------------------------------------------------------------------------------------------
def setup_main_window(main_window: QtWidgets.QMainWindow):
"""Set up the main window with a stacked widget containing home, admin, and employee pages."""
main_window.setObjectName("MainWindow")
main_window.resize(800, 600)
main_window.setStyleSheet("background-color: #f0f2f5;")
central_widget = QtWidgets.QWidget(main_window)
main_layout = QtWidgets.QHBoxLayout(central_widget)
stacked_widget = QtWidgets.QStackedWidget(central_widget)
# Create pages
def switch_to_admin():
stacked_widget.setCurrentIndex(ADMIN_PAGE)
def switch_to_employee():
stacked_widget.setCurrentIndex(EMPLOYEE_PAGE)
def exit_app():
QtWidgets.QApplication.quit()
def admin_login_menu_page(name, password):
try:
# Ideally, here you'd call a backend authentication check
success = backend.check_admin(name, password)
if success:
QtWidgets.QMessageBox.information(
stacked_widget, "Login Successful", f"Welcome, {name}!"
)
stacked_widget.setCurrentIndex(ADMIN_MENU_PAGE)
else:
QtWidgets.QMessageBox.warning(
stacked_widget, "Login Failed", "Incorrect name or password."
)
except Exception as e:
QtWidgets.QMessageBox.critical(
stacked_widget, "Error", f"An error occurred during login: {str(e)}"
)
# show_popup_message(stacked_widget,"Invalid admin credentials",0)
def add_employee_form_submit(name, password, salary, position):
if (
len(name) != 0
and len(password) != 0
and len(salary) != 0
and len(position) != 0
):
backend.create_employee(name, password, salary, position)
show_popup_message(
stacked_widget, "Employee added successfully", ADMIN_MENU_PAGE
)
else:
print("Please fill in all fields")
show_popup_message(
stacked_widget, "Please fill in all fields", ADD_EMPLOYEE_PAGE
)
def update_employee_data(name, password, salary, position, name_to_update):
try:
cur = backend.cur
if name_to_update:
cur.execute(
"UPDATE staff SET Name = ? WHERE name = ?", (name, name_to_update)
)
cur.execute("UPDATE staff SET Name = ? WHERE name = ?", (password, name))
cur.execute(
"UPDATE staff SET password = ? WHERE name = ?", (password, name)
)
cur.execute("UPDATE staff SET salary = ? WHERE name = ?", (salary, name))
cur.execute(
"UPDATE staff SET position = ? WHERE name = ?", (position, name)
)
backend.conn.commit()
show_popup_message(
stacked_widget, "Employee Update successfully", UPDATE_EMPLOYEE_PAGE2
)
except:
show_popup_message(
stacked_widget, "Please fill in all fields", UPDATE_EMPLOYEE_PAGE2
)
# Create Home Page
home_page = create_home_page(
stacked_widget, switch_to_admin, switch_to_employee, exit_app
)
# ------------------------------------------------------------------------------------------------
# -------------------------------------Admin panel page ---------------------------------------
# ------------------------------------------------------------------------------------------------
# Create Admin Login Page
admin_page, admin_name, admin_password, admin_submit = create_login_page(
stacked_widget, title="Admin Login"
)
admin_password.setEchoMode(QtWidgets.QLineEdit.Password)
admin_name.setFont(QtGui.QFont("Arial", 10))
admin_password.setFont(QtGui.QFont("Arial", 10))
admin_name.setPlaceholderText("Enter your name")
admin_password.setPlaceholderText("Enter your password")
admin_submit.clicked.connect(
lambda: admin_login_menu_page(admin_name.text(), admin_password.text())
)
# Create Admin Menu Page
(
admin_menu_page,
add_button,
update_button,
list_button,
money_button,
back_button,
) = create_admin_menu_page(stacked_widget)
add_button.clicked.connect(
lambda: stacked_widget.setCurrentIndex(ADD_EMPLOYEE_PAGE)
)
update_button.clicked.connect(
lambda: stacked_widget.setCurrentIndex(UPDATE_EMPLOYEE_PAGE1)
)
list_button.clicked.connect(
lambda: stacked_widget.setCurrentIndex(EMPLOYEE_LIST_PAGE)
)
back_button.clicked.connect(lambda: stacked_widget.setCurrentIndex(HOME_PAGE))
money_button.clicked.connect(
lambda: stacked_widget.setCurrentIndex(ADMIN_TOTAL_MONEY)
)
# Create Add Employee Page
add_employee_page, emp_name, emp_password, emp_salary, emp_position, emp_submit = (
create_add_employee_page(stacked_widget, title="Add Employee")
)
# Update Employee Page
u_employee_page1 = get_employee_name(stacked_widget)
# apply the update_employee_data function to the submit button
(
u_employee_page2,
u_employee_name,
u_employee_password,
u_employee_salary,
u_employee_position,
u_employee_update,
) = create_add_employee_page(
stacked_widget, "Update Employee Details", update_btn=True
)
def populate_employee_data():
global employee_data
if employee_data:
print("employee_data is not None")
u_employee_name.setText(str(employee_data[0])) # Name
u_employee_password.setText(str(employee_data[1])) # Password
u_employee_salary.setText(str(employee_data[2])) # Salary
u_employee_position.setText(str(employee_data[3])) # Position
else:
# Clear fields if no employee data is available
print("employee_data is None")
u_employee_name.clear()
u_employee_password.clear()
u_employee_salary.clear()
u_employee_position.clear()
QtWidgets.QMessageBox.warning(
stacked_widget, "No Data", "No employee data available to display."
)
def on_page_changed(index):
if index == 6: # update_employee_page2 is at index 6
populate_employee_data()
# Connect the currentChanged signal to the on_page_changed function
stacked_widget.currentChanged.connect(on_page_changed)
def update_employee_data(name, password, salary, position, name_to_update):
try:
if not name_to_update:
show_popup_message(
stacked_widget,
"Original employee name is missing.",
UPDATE_EMPLOYEE_PAGE2,
)
return
if not (name or password or salary or position):
show_popup_message(
stacked_widget,
"Please fill at least one field to update.",
UPDATE_EMPLOYEE_PAGE2,
)
return
if name:
backend.update_employee_name(name, name_to_update)
if password:
backend.update_employee_password(password, name_to_update)
if salary:
try:
salary = int(salary)
backend.update_employee_salary(salary, name_to_update)
except ValueError:
show_popup_message(
stacked_widget, "Salary must be a valid number.", 5
)
return
if position:
backend.update_employee_position(position, name_to_update)
show_popup_message(
stacked_widget, "Employee updated successfully.", ADMIN_MENU_PAGE
)
except Exception as e:
show_popup_message(
stacked_widget,
f"Error updating employee: {str(e)}",
UPDATE_EMPLOYEE_PAGE2,
show_cancel=True,
cancel_page=ADMIN_MENU_PAGE,
)
u_employee_update.clicked.connect(
lambda: update_employee_data(
u_employee_name.text().strip(),
u_employee_password.text().strip(),
u_employee_salary.text().strip(),
u_employee_position.text().strip(),
employee_data[0] if employee_data else "",
)
)
emp_submit.clicked.connect(
lambda: add_employee_form_submit(
emp_name.text(), emp_password.text(), emp_salary.text(), emp_position.text()
)
)
# show employee list page
employee_list_page = show_employee_list_page(stacked_widget, "Employee List")
admin_total_money = show_total_money(stacked_widget, "Total Money")
# ------------------------------------------------------------------------------------------------
# -------------------------------------Employee panel page ---------------------------------------
# ------------------------------------------------------------------------------------------------
# Create Employee Login Page
employee_page, employee_name, employee_password, employee_submit = (
create_login_page(stacked_widget, title="Employee Login")
)
employee_submit.clicked.connect(
lambda: stacked_widget.setCurrentIndex(EMPLOYEE_MENU_PAGE)
)
(
employee_menu_page,
E_Create_Account,
E_Show_Details,
E_add_Balance,
E_Withdraw_Money,
E_Chack_Balanace,
E_Update_Account,
E_list_of_all_Members,
E_Delete_Account,
E_Back,
) = create_employee_menu_page(stacked_widget, "Employee Menu")
# List of all page
E_Create_Account.clicked.connect(
lambda: stacked_widget.setCurrentIndex(EMPLOYEE_CREATE_ACCOUNT_PAGE)
)
E_Show_Details.clicked.connect(
lambda: stacked_widget.setCurrentIndex(EMPLOYEE_SHOW_DETAILS_PAGE1)
)
E_add_Balance.clicked.connect(
lambda: stacked_widget.setCurrentIndex(EMPLOYEE_ADD_BALANCE_SEARCH)
)
E_Withdraw_Money.clicked.connect(
lambda: stacked_widget.setCurrentIndex(EMPLOYEE_WITHDRAW_MONEY_SEARCH)
)
E_Chack_Balanace.clicked.connect(
lambda: stacked_widget.setCurrentIndex(EMPLOYEE_CHECK_BALANCE_SEARCH)
)
E_Update_Account.clicked.connect(
lambda: stacked_widget.setCurrentIndex(EMPLOYEE_UPDATE_ACCOUNT_SEARCH)
)
# E_list_of_all_Members.clicked.connect(lambda: stacked_widget.setCurrentIndex(EMPLOYEE_LIST_OF_ALL_MEMBERS_PAGE))
# E_Delete_Account.clicked.connect(lambda: stacked_widget.setCurrentIndex(EMPLOYEE_DELETE_ACCOUNT_PAGE))
# E_Back.clicked.connect(lambda: stacked_widget.setCurrentIndex(EMPLOYEE_MENU_PAGE))
employee_create_account_page, all_employee_menu_btn = create_account_page(
stacked_widget, "Create Account"
)
all_employee_menu_btn[6].clicked.connect(
lambda: add_account_form_submit(
all_employee_menu_btn[0].text().strip(),
all_employee_menu_btn[1].text().strip(),
all_employee_menu_btn[2].text().strip(),
all_employee_menu_btn[3].text().strip(),
all_employee_menu_btn[5].currentText(),
all_employee_menu_btn[4].text().strip(),
)
)
def add_account_form_submit(name, age, address, balance, account_type, mobile):
if (
len(name) != 0
and len(age) != 0
and len(address) != 0
and len(balance) != 0
and len(account_type) != 0
and len(mobile) != 0
):
try:
balance = int(balance)
except ValueError:
show_popup_message(
stacked_widget,
"Balance must be a valid number",
EMPLOYEE_CREATE_ACCOUNT_PAGE,
)
return
if balance < 0:
show_popup_message(
stacked_widget,
"Balance cannot be negative",
EMPLOYEE_CREATE_ACCOUNT_PAGE,
)
return
if account_type not in ["Savings", "Current", "Fixed Deposit"]:
show_popup_message(
stacked_widget, "Invalid account type", EMPLOYEE_CREATE_ACCOUNT_PAGE
)
return
if len(mobile) != 10:
show_popup_message(
stacked_widget,
"Mobile number must be 10 digits",
EMPLOYEE_CREATE_ACCOUNT_PAGE,
)
return
if not mobile.isdigit():
show_popup_message(
stacked_widget,
"Mobile number must contain only digits",
EMPLOYEE_CREATE_ACCOUNT_PAGE,
)
return
if not name.isalpha():
show_popup_message(
stacked_widget,
"Name must contain only alphabets",
EMPLOYEE_CREATE_ACCOUNT_PAGE,
)
return
if not age.isdigit():
show_popup_message(
stacked_widget,
"Age must contain only digits",
EMPLOYEE_CREATE_ACCOUNT_PAGE,
)
return
if int(age) < 18:
show_popup_message(
stacked_widget,
"Age must be greater than 18",
EMPLOYEE_CREATE_ACCOUNT_PAGE,
)
return
if len(address) < 10:
show_popup_message(
stacked_widget,
"Address must be at least 10 characters long",
EMPLOYEE_CREATE_ACCOUNT_PAGE,
)
return
backend.create_customer(name, age, address, balance, account_type, mobile)
all_employee_menu_btn[0].setText("")
all_employee_menu_btn[1].setText("")
all_employee_menu_btn[2].setText("")
all_employee_menu_btn[3].setText("")
all_employee_menu_btn[4].setText("")
(all_employee_menu_btn[5].currentText(),)
show_popup_message(
stacked_widget,
"Account created successfully",
EMPLOYEE_MENU_PAGE,
False,
)
else:
show_popup_message(
stacked_widget,
"Please fill in all fields",
EMPLOYEE_CREATE_ACCOUNT_PAGE,
)
# Add pages to stacked widget
show_bank_user_data_page1, show_bank_user_other1 = create_show_details_page1(
stacked_widget, "Show Details"
)
show_bank_user_data_page2, show_bank_user_other2 = create_show_details_page2(
stacked_widget, "Show Details"
)
show_bank_user_other1[1].clicked.connect(
lambda: show_bank_user_data_page1_submit_btn(
int(show_bank_user_other1[0].text().strip())
)
)
def show_bank_user_data_page1_submit_btn(name: int):
account_data = backend.get_details(name)
if account_data:
show_bank_user_other1[0].setText("")
show_bank_user_other2[0].setText(str(account_data[0]))
show_bank_user_other2[1].setText(str(account_data[1]))
show_bank_user_other2[2].setText(str(account_data[2]))
show_bank_user_other2[3].setText(str(account_data[3]))
show_bank_user_other2[4].setText(str(account_data[4]))
show_bank_user_other2[5].setText(str(account_data[5]))
show_bank_user_other2[6].setText(str(account_data[6]))
stacked_widget.setCurrentIndex(EMPLOYEE_SHOW_DETAILS_PAGE2)
else:
show_popup_message(
stacked_widget, "Account not found", EMPLOYEE_SHOW_DETAILS_PAGE1
)
def setup_balance_operation_flow(
stacked_widget,
title_search,
placeholder,
title_form,
action_button_text,
success_message,
backend_action_fn,
stacked_page_index,
search_index,
page_index,
need_input=True,
):
# Create search UI
search_page, search_widgets = search_result(
stacked_widget, title_search, placeholder
)
search_input = search_widgets[0]
search_button = search_widgets[1]
# Create update UI
form_page, form_widgets = update_user(
stacked_widget, title_form, action_button_text, need_input
)
if need_input:
name_field, balance_field, amount_field, action_button = form_widgets
else:
name_field, balance_field, action_button = form_widgets
def on_search_submit():
try:
account_number = int(search_input.text().strip())
except ValueError:
show_popup_message(
stacked_widget, "Please enter a valid account number.", search_index
)
return
if backend.check_acc_no(account_number):
account_data = backend.get_details(account_number)
name_field.setText(str(account_data[1]))
balance_field.setText(str(account_data[4]))
stacked_widget.setCurrentIndex(page_index)
else:
show_popup_message(
stacked_widget,
"Account not found",
search_index,
show_cancel=True,
cancel_page=EMPLOYEE_MENU_PAGE,
)
def on_action_submit():
try:
account_number = int(search_input.text().strip())
amount = int(amount_field.text().strip())
backend_action_fn(amount, account_number)
name_field.setText("")
balance_field.setText("")
search_input.setText("")
show_popup_message(stacked_widget, success_message, EMPLOYEE_MENU_PAGE)
except ValueError:
show_popup_message(
stacked_widget, "Enter valid numeric amount.", page_index
)
search_button.clicked.connect(on_search_submit)
action_button.clicked.connect(on_action_submit)
return search_page, form_page
# Add Balance Flow
add_balance_search_page, add_balance_page = setup_balance_operation_flow(
stacked_widget=stacked_widget,
title_search="Add Balance",
placeholder="Enter Account Number: ",
title_form="Add Balance User Account",
action_button_text="Enter Amount: ",
success_message="Balance updated successfully",
backend_action_fn=backend.update_balance,
stacked_page_index=EMPLOYEE_ADD_BALANCE_SEARCH,
search_index=EMPLOYEE_ADD_BALANCE_SEARCH,
page_index=EMPLOYEE_ADD_BALANCE_PAGE,
)
# Withdraw Money Flow
withdraw_money_search_page, withdraw_money_page = setup_balance_operation_flow(
stacked_widget=stacked_widget,
title_search="Withdraw Money",
placeholder="Enter Account Number: ",
title_form="Withdraw Money From User Account",
action_button_text="Withdraw Amount: ",
success_message="Amount withdrawn successfully",
backend_action_fn=backend.deduct_balance,
stacked_page_index=EMPLOYEE_WITHDRAW_MONEY_SEARCH,
search_index=EMPLOYEE_WITHDRAW_MONEY_SEARCH,
page_index=EMPLOYEE_WITHDRAW_MONEY_PAGE,
)
check_balance_search_page, check_balance_page = setup_balance_operation_flow(
stacked_widget=stacked_widget,
title_search="Check Balance",
placeholder="Enter Account Number: ",
title_form="Check Balance",
action_button_text="Check Balance: ",
success_message="Balance checked successfully",
backend_action_fn=backend.check_balance,
stacked_page_index=EMPLOYEE_CHECK_BALANCE_SEARCH,
search_index=EMPLOYEE_CHECK_BALANCE_SEARCH,
page_index=EMPLOYEE_CHECK_BALANCE_PAGE,
need_input=False,
)
def find_and_hide_submit_button(page):
# Find all QPushButton widgets in the page
buttons = page.findChildren(QtWidgets.QPushButton)
for button in buttons:
if button.text() == "Submit":
button.hide()
break
find_and_hide_submit_button(check_balance_page)
# Update Employee details
update_empolyee_search_page, update_empolyee_search_other = search_result(
stacked_widget, "Update Employee Details", "Enter Employee ID: "
)
update_employee_page, update_employee_other = create_account_page(
stacked_widget, "Update Employee", True
)
name_edit = update_employee_other[0]
Age_edit = update_employee_other[1]
Address_edit = update_employee_other[2]
Balance_edit = update_employee_other[3]
Mobile_number_edit = update_employee_other[4]
account_type_dropdown = update_employee_other[5]
# name_edit, Age_edit,Address_edit,Balance_edit,Mobile_number_edit, account_type_dropdown ,submit_button
update_empolyee_search_other[1].clicked.connect(
lambda: update_employee_search_submit()
)
update_employee_other[6].clicked.connect(lambda: update_employee_submit())
def update_employee_search_submit():
try:
user_data = backend.get_details(
int(update_empolyee_search_other[0].text().strip())
)
print("Featch data: ", user_data)
name_edit.setText(str(user_data[1]))
Age_edit.setText(str(user_data[2]))
Address_edit.setText(str(user_data[3]))
Balance_edit.setText(str(user_data[4]))
Mobile_number_edit.setText(str(user_data[6]))
Balance_edit.setDisabled(True)
account_type_dropdown.setCurrentText(str(user_data[5]))
stacked_widget.setCurrentIndex(EMPLOYEE_UPDATE_ACCOUNT_PAGE)
except ValueError:
show_popup_message(
stacked_widget, "Enter valid numeric employee ID.", EMPLOYEE_MENU_PAGE
)
def update_employee_submit():
try:
user_data = backend.get_details(
int(update_empolyee_search_other[0].text().strip())
)
name = name_edit.text().strip()
age = int(Age_edit.text().strip())
address = Address_edit.text().strip()
mobile_number = int(Mobile_number_edit.text().strip())
account_type = account_type_dropdown.currentText()
print(name, age, address, mobile_number, account_type)
backend.update_name_in_bank_table(name, user_data[0])
backend.update_age_in_bank_table(age, user_data[0])
backend.update_address_in_bank_table(address, user_data[0])
backend.update_address_in_bank_table(address, user_data[0])
backend.update_mobile_number_in_bank_table(mobile_number, user_data[0])
backend.update_acc_type_in_bank_table(account_type, user_data[0])
show_popup_message(
stacked_widget,
"Employee details updated successfully",
EMPLOYEE_MENU_PAGE,
)
stacked_widget.setCurrentIndex(EMPLOYEE_MENU_PAGE)
except ValueError as e:
print(e)
show_popup_message(
stacked_widget, "Enter valid numeric employee ID.", EMPLOYEE_MENU_PAGE
)
    # Register every page with the stacked widget; the trailing comments are
    # the page indices the EMPLOYEE_*/ADMIN_* constants must match.
    stacked_widget.addWidget(home_page)  # 0
    stacked_widget.addWidget(admin_page)  # 1
    stacked_widget.addWidget(employee_page)  # 2
    stacked_widget.addWidget(admin_menu_page)  # 3
    stacked_widget.addWidget(add_employee_page)  # 4
    stacked_widget.addWidget(u_employee_page1)  # 5
    stacked_widget.addWidget(u_employee_page2)  # 6
    stacked_widget.addWidget(employee_list_page)  # 7
    stacked_widget.addWidget(admin_total_money)  # 8
    stacked_widget.addWidget(employee_menu_page)  # 9
    stacked_widget.addWidget(employee_create_account_page)  # 10
    stacked_widget.addWidget(show_bank_user_data_page1)  # 11
    stacked_widget.addWidget(show_bank_user_data_page2)  # 12
    stacked_widget.addWidget(add_balance_search_page)  # 13
    stacked_widget.addWidget(add_balance_page)  # 14
    stacked_widget.addWidget(withdraw_money_search_page)  # 15
    stacked_widget.addWidget(withdraw_money_page)  # 16
    stacked_widget.addWidget(check_balance_search_page)  # 17
    stacked_widget.addWidget(check_balance_page)  # 18
    stacked_widget.addWidget(update_empolyee_search_page)  # 19
    stacked_widget.addWidget(update_employee_page)  # 20
    main_layout.addWidget(stacked_widget)
    main_window.setCentralWidget(central_widget)
    # Set initial page
    # NOTE(review): index 9 is the employee menu, not the home page (0) —
    # confirm this is intentional and not a leftover from debugging.
    stacked_widget.setCurrentIndex(9)
    # Expose the login widgets so callers can attach their own handlers.
    return stacked_widget, {
        "admin_name": admin_name,
        "admin_password": admin_password,
        "admin_submit": admin_submit,
        "employee_name": employee_name,
        "employee_password": employee_password,
        "employee_submit": employee_submit,
    }
def main():
    """Create the Qt application, build the main window, and run the event loop."""
    application = QtWidgets.QApplication(sys.argv)
    window = QtWidgets.QMainWindow()
    # setup_main_window attaches all pages; its return values (stacked
    # widget + login widgets) are not needed at this call site.
    setup_main_window(window)
    window.show()
    sys.exit(application.exec_())
# -------------------------------------------------------------------------------------------------------------
if __name__ == "__main__":
    main()
# TO-DO:
# 1. Refresh the employee list page after adding, deleting, or updating an employee.
| {
"repo_id": "geekcomputers/Python",
"file_path": "bank_managment_system/QTFrontend.py",
"license": "MIT License",
"lines": 1562,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
getredash/redash:tests/destinations/test_webhook.py | import json
from unittest import mock
from redash.destinations.webhook import Webhook
from redash.models import Alert
def test_webhook_notify_handles_unicode():
    """The webhook payload must be UTF-8 bytes that round-trip as JSON."""
    # Mock alert carrying every property serialize_alert touches.
    alert = mock.Mock()
    alert.configure_mock(
        id=1,
        name="Test Alert",
        custom_subject="Test Subject With Unicode: 晨",
        custom_body="Test Body",
        options={},
        state="ok",
        last_triggered_at=None,
        updated_at="2025-12-02T08:00:00Z",
        created_at="2025-12-02T08:00:00Z",
        rearm=None,
        query_id=10,
        user_id=20,
    )
    options = {"url": "https://example.com/webhook", "username": "user", "password": "password"}
    destination = Webhook(options)
    query, user, app = mock.Mock(), mock.Mock(), mock.Mock()
    with mock.patch("redash.destinations.webhook.requests.post") as mock_post:
        mock_post.return_value = mock.Mock(status_code=200)
        destination.notify(
            alert,
            query,
            user,
            Alert.TRIGGERED_STATE,
            app,
            "http://redash.local",
            {},
            options,
        )
        # Inspect exactly what was handed to requests.post.
        _, call_kwargs = mock_post.call_args
        sent_data = call_kwargs["data"]
        # 1. We must send bytes, not str.
        assert isinstance(sent_data, bytes)
        # 2. The bytes must be valid UTF-8 JSON with the unicode subject intact.
        decoded = sent_data.decode("utf-8")
        assert json.loads(decoded)["alert"]["title"] == alert.custom_subject
        assert "Test Subject With Unicode: 晨" in decoded
| {
"repo_id": "getredash/redash",
"file_path": "tests/destinations/test_webhook.py",
"license": "BSD 2-Clause \"Simplified\" License",
"lines": 41,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
getredash/redash:redash/query_runner/duckdb.py | import logging
from redash.query_runner import (
TYPE_BOOLEAN,
TYPE_DATE,
TYPE_DATETIME,
TYPE_FLOAT,
TYPE_INTEGER,
TYPE_STRING,
BaseSQLQueryRunner,
InterruptException,
register,
)
logger = logging.getLogger(__name__)
# The runner is only enabled when the optional duckdb package is installed.
try:
    import duckdb
    enabled = True
except ImportError:
    enabled = False
# Map DuckDB types to Redash column types
# Types not listed here fall back to TYPE_STRING in run_query.
TYPES_MAP = {
    "BOOLEAN": TYPE_BOOLEAN,
    "TINYINT": TYPE_INTEGER,
    "SMALLINT": TYPE_INTEGER,
    "INTEGER": TYPE_INTEGER,
    "BIGINT": TYPE_INTEGER,
    "HUGEINT": TYPE_INTEGER,
    "REAL": TYPE_FLOAT,
    "DOUBLE": TYPE_FLOAT,
    "DECIMAL": TYPE_FLOAT,
    "VARCHAR": TYPE_STRING,
    "BLOB": TYPE_STRING,
    "DATE": TYPE_DATE,
    "TIMESTAMP": TYPE_DATETIME,
    "TIMESTAMP WITH TIME ZONE": TYPE_DATETIME,
    "TIME": TYPE_DATETIME,
    "INTERVAL": TYPE_STRING,
    "UUID": TYPE_STRING,
    "JSON": TYPE_STRING,
    "STRUCT": TYPE_STRING,
    "MAP": TYPE_STRING,
    "UNION": TYPE_STRING,
}
class DuckDB(BaseSQLQueryRunner):
    """Redash query runner backed by DuckDB.

    Connects to a local file or in-memory database and optionally loads
    DuckDB extensions listed in the data source configuration. Schema
    introspection includes catalog names for attached databases (e.g.
    MotherDuck) and expands STRUCT columns into dotted pseudo-columns.
    """
    noop_query = "SELECT 1"
    def __init__(self, configuration):
        """Store the configuration and open the connection immediately.

        ``dbpath`` defaults to ':memory:'; ``extensions`` is a comma
        separated list where a 'community.' prefix installs from the
        community repository.
        """
        super().__init__(configuration)
        self.dbpath = configuration.get("dbpath", ":memory:")
        exts = configuration.get("extensions", "")
        # Tolerate blank entries ("a,,b", trailing commas) in the list.
        self.extensions = [e.strip() for e in exts.split(",") if e.strip()]
        self._connect()
    @classmethod
    def configuration_schema(cls):
        """JSON schema describing the data source configuration form."""
        return {
            "type": "object",
            "properties": {
                "dbpath": {
                    "type": "string",
                    "title": "Database Path",
                    "default": ":memory:",
                },
                "extensions": {
                    "type": "string",
                    "title": "Extensions (comma separated)",
                },
            },
            "order": ["dbpath", "extensions"],
            "required": ["dbpath"],
        }
    @classmethod
    def enabled(cls) -> bool:
        # Mirrors whether the duckdb import at module level succeeded.
        return enabled
    def _connect(self) -> None:
        """Open the connection and best-effort INSTALL/LOAD each extension.

        Extension failures are logged and skipped rather than raised, so a
        bad extension name does not take down the whole data source.
        """
        self.con = duckdb.connect(self.dbpath)
        for ext in self.extensions:
            try:
                if "." in ext:
                    prefix, name = ext.split(".", 1)
                    if prefix == "community":
                        self.con.execute(f"INSTALL {name} FROM community")
                        self.con.execute(f"LOAD {name}")
                    else:
                        raise Exception("Unknown extension prefix.")
                else:
                    self.con.execute(f"INSTALL {ext}")
                    self.con.execute(f"LOAD {ext}")
            except Exception as e:
                logger.warning("Failed to load extension %s: %s", ext, e)
    def run_query(self, query, user) -> tuple:
        """Execute ``query`` and return ``(data, error)``.

        On success ``data`` is ``{"columns": [...], "rows": [...]}`` and
        ``error`` is None; on failure ``data`` is None and ``error`` holds
        the message. ``user`` is unused here (BaseSQLQueryRunner interface).
        """
        try:
            cursor = self.con.cursor()
            cursor.execute(query)
            # Map DuckDB type names through TYPES_MAP, defaulting to string.
            columns = self.fetch_columns(
                [(d[0], TYPES_MAP.get(d[1].upper(), TYPE_STRING)) for d in cursor.description]
            )
            rows = [dict(zip((col["name"] for col in columns), row)) for row in cursor.fetchall()]
            data = {"columns": columns, "rows": rows}
            return data, None
        except duckdb.InterruptException:
            # Re-raise as Redash's own interrupt type so cancellation works.
            raise InterruptException("Query cancelled by user.")
        except Exception as e:
            logger.exception("Error running query: %s", e)
            return None, str(e)
    def get_schema(self, get_stats=False) -> list:
        """Return the schema-browser payload: one dict per table.

        Raises if the table listing itself fails; per-table DESCRIBE
        failures are logged and that table is skipped.
        """
        tables_query = """
            SELECT table_catalog, table_schema, table_name FROM information_schema.tables
            WHERE table_schema NOT IN ('information_schema', 'pg_catalog');
        """
        tables_results, error = self.run_query(tables_query, None)
        if error:
            raise Exception(f"Failed to get tables: {error}")
        schema = {}
        for table_row in tables_results["rows"]:
            # Include catalog (database) in the full table name for MotherDuck support
            catalog = table_row["table_catalog"]
            schema_name = table_row["table_schema"]
            table_name = table_row["table_name"]
            # Skip catalog prefix for default local databases (memory, temp)
            # but include it for MotherDuck and attached databases
            if catalog.lower() in ("memory", "temp", "system"):
                full_table_name = f"{schema_name}.{table_name}"
                describe_query = f'DESCRIBE "{schema_name}"."{table_name}";'
            else:
                full_table_name = f"{catalog}.{schema_name}.{table_name}"
                describe_query = f'DESCRIBE "{catalog}"."{schema_name}"."{table_name}";'
            schema[full_table_name] = {"name": full_table_name, "columns": []}
            columns_results, error = self.run_query(describe_query, None)
            if error:
                logger.warning("Failed to describe table %s: %s", full_table_name, error)
                continue
            for col_row in columns_results["rows"]:
                col = {"name": col_row["column_name"], "type": col_row["column_type"]}
                schema[full_table_name]["columns"].append(col)
                # STRUCT columns additionally get dotted pseudo-columns for
                # each (possibly nested) field, e.g. "payload.a".
                if col_row["column_type"].startswith("STRUCT("):
                    schema[full_table_name]["columns"].extend(
                        self._expand_struct_fields(col["name"], col_row["column_type"])
                    )
        return list(schema.values())
    def _expand_struct_fields(self, base_name: str, struct_type: str) -> list:
        """Recursively expand STRUCT(...) definitions into pseudo-columns."""
        fields = []
        # strip STRUCT( ... )
        inner = struct_type[len("STRUCT(") : -1].strip()
        # careful: nested structs, so parse comma-separated parts properly
        # (only split on commas at parenthesis depth 0; other characters,
        # including parentheses, are accumulated into the current part).
        depth, current, parts = 0, [], []
        for c in inner:
            if c == "(":
                depth += 1
            elif c == ")":
                depth -= 1
            if c == "," and depth == 0:
                parts.append("".join(current).strip())
                current = []
            else:
                current.append(c)
        if current:
            parts.append("".join(current).strip())
        for part in parts:
            # each part looks like: "fieldname TYPE"
            fname, ftype = part.split(" ", 1)
            colname = f"{base_name}.{fname}"
            fields.append({"name": colname, "type": ftype})
            if ftype.startswith("STRUCT("):
                fields.extend(self._expand_struct_fields(colname, ftype))
        return fields
register(DuckDB)
| {
"repo_id": "getredash/redash",
"file_path": "redash/query_runner/duckdb.py",
"license": "BSD 2-Clause \"Simplified\" License",
"lines": 165,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
getredash/redash:tests/query_runner/test_duckdb.py | from unittest import TestCase
from unittest.mock import patch
from redash.query_runner.duckdb import DuckDB
class TestDuckDBSchema(TestCase):
    """Schema-introspection tests for the DuckDB query runner."""
    def setUp(self) -> None:
        # In-memory database; no file cleanup needed.
        self.runner = DuckDB({"dbpath": ":memory:"})
    @patch.object(DuckDB, "run_query")
    def test_simple_schema_build(self, mock_run_query) -> None:
        """A plain table yields schema.table naming and its columns."""
        # Simulate queries: first for tables, then for DESCRIBE
        # (side_effect order must match get_schema's query order).
        mock_run_query.side_effect = [
            (
                {
                    "rows": [
                        {
                            "table_catalog": "memory",
                            "table_schema": "main",
                            "table_name": "users",
                        }
                    ]
                },
                None,
            ),
            (
                {
                    "rows": [
                        {"column_name": "id", "column_type": "INTEGER"},
                        {"column_name": "name", "column_type": "VARCHAR"},
                    ]
                },
                None,
            ),
        ]
        schema = self.runner.get_schema()
        self.assertEqual(len(schema), 1)
        self.assertEqual(schema[0]["name"], "main.users")
        self.assertListEqual(
            schema[0]["columns"],
            [{"name": "id", "type": "INTEGER"}, {"name": "name", "type": "VARCHAR"}],
        )
    @patch.object(DuckDB, "run_query")
    def test_struct_column_expansion(self, mock_run_query) -> None:
        """STRUCT columns are kept raw AND expanded into dotted fields."""
        # First call to run_query -> tables list
        mock_run_query.side_effect = [
            (
                {
                    "rows": [
                        {
                            "table_catalog": "memory",
                            "table_schema": "main",
                            "table_name": "events",
                        }
                    ]
                },
                None,
            ),
            # Second call -> DESCRIBE output
            (
                {
                    "rows": [
                        {
                            "column_name": "payload",
                            "column_type": "STRUCT(a INTEGER, b VARCHAR)",
                        }
                    ]
                },
                None,
            ),
        ]
        schema_list = self.runner.get_schema()
        self.assertEqual(len(schema_list), 1)
        schema = schema_list[0]
        # Ensure both raw and expanded struct fields are present
        self.assertIn("main.events", schema["name"])
        self.assertListEqual(
            schema["columns"],
            [
                {"name": "payload", "type": "STRUCT(a INTEGER, b VARCHAR)"},
                {"name": "payload.a", "type": "INTEGER"},
                {"name": "payload.b", "type": "VARCHAR"},
            ],
        )
    def test_nested_struct_expansion(self) -> None:
        """End-to-end: nested STRUCTs in a real in-memory DuckDB database."""
        runner = DuckDB({"dbpath": ":memory:"})
        runner.con.execute(
            """
            CREATE TABLE sample_struct_table (
                id INTEGER,
                info STRUCT(
                    name VARCHAR,
                    metrics STRUCT(score DOUBLE, rank INTEGER),
                    tags STRUCT(primary_tag VARCHAR, secondary_tag VARCHAR)
                )
            );
            """
        )
        schema = runner.get_schema()
        table = next(t for t in schema if t["name"] == "main.sample_struct_table")
        colnames = [c["name"] for c in table["columns"]]
        assert "info" in colnames
        # DESCRIBE quotes reserved words, hence the quoted "name" field.
        assert 'info."name"' in colnames
        assert "info.metrics" in colnames
        assert "info.metrics.score" in colnames
        assert "info.metrics.rank" in colnames
        assert "info.tags.primary_tag" in colnames
        assert "info.tags.secondary_tag" in colnames
    @patch.object(DuckDB, "run_query")
    def test_motherduck_catalog_included(self, mock_run_query) -> None:
        """Non-default catalogs keep their catalog name in the table path."""
        # Test that non-default catalogs (like MotherDuck) include catalog in name
        mock_run_query.side_effect = [
            (
                {
                    "rows": [
                        {
                            "table_catalog": "sample_data",
                            "table_schema": "kaggle",
                            "table_name": "movies",
                        }
                    ]
                },
                None,
            ),
            (
                {
                    "rows": [
                        {"column_name": "title", "column_type": "VARCHAR"},
                    ]
                },
                None,
            ),
        ]
        schema = self.runner.get_schema()
        self.assertEqual(len(schema), 1)
        # Should include catalog name for non-default catalogs
        self.assertEqual(schema[0]["name"], "sample_data.kaggle.movies")
    @patch.object(DuckDB, "run_query")
    def test_error_propagation(self, mock_run_query) -> None:
        """A failed table listing raises with the backend error message."""
        mock_run_query.return_value = (None, "boom")
        with self.assertRaises(Exception) as ctx:
            self.runner.get_schema()
        self.assertIn("boom", str(ctx.exception))
| {
"repo_id": "getredash/redash",
"file_path": "tests/query_runner/test_duckdb.py",
"license": "BSD 2-Clause \"Simplified\" License",
"lines": 140,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
getredash/redash:tests/query_runner/test_azure_kusto.py | from unittest import TestCase
from unittest.mock import patch
from redash.query_runner.azure_kusto import AzureKusto
class TestAzureKusto(TestCase):
    """Schema-introspection tests for the Azure Kusto query runner."""
    def setUp(self):
        # Minimal configuration accepted by AzureKusto's constructor.
        self.configuration = {
            "cluster": "https://example.kusto.windows.net",
            "database": "sample_db",
            "azure_ad_client_id": "client_id",
            "azure_ad_client_secret": "client_secret",
            "azure_ad_tenant_id": "tenant_id",
        }
        self.kusto = AzureKusto(self.configuration)
    @patch.object(AzureKusto, "run_query")
    def test_get_schema(self, mock_run_query):
        """Both tables and materialized views appear in the parsed schema."""
        mock_response = {
            "rows": [
                {
                    "DatabaseSchema": '{"Databases":{"sample_db":{"Tables":{"Table1":{"Name":"Table1","OrderedColumns":[{"Name":"Column1","Type":"System.String","CslType":"string"},{"Name":"Column2","Type":"System.DateTime","CslType":"datetime"}]}},"MaterializedViews":{"View1":{"Name":"View1","OrderedColumns":[{"Name":"Column1","Type":"System.String","CslType":"string"},{"Name":"Column2","Type":"System.DateTime","CslType":"datetime"}]}}}}}'
                }
            ]
        }
        mock_run_query.return_value = (mock_response, None)
        expected_schema = [
            {
                "name": "Table1",
                "columns": [{"name": "Column1", "type": "string"}, {"name": "Column2", "type": "datetime"}],
            },
            {
                "name": "View1",
                "columns": [{"name": "Column1", "type": "string"}, {"name": "Column2", "type": "datetime"}],
            },
        ]
        schema = self.kusto.get_schema()
        # (Removed a stray debug print(schema) left over from development.)
        self.assertEqual(schema, expected_schema)
| {
"repo_id": "getredash/redash",
"file_path": "tests/query_runner/test_azure_kusto.py",
"license": "BSD 2-Clause \"Simplified\" License",
"lines": 36,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
getzep/graphiti:examples/gliner2/gliner2_neo4j.py | """
Copyright 2025, Zep Software, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import asyncio
import json
import logging
import os
from datetime import datetime, timezone
from logging import INFO
from dotenv import load_dotenv
from pydantic import BaseModel, Field
from graphiti_core import Graphiti
from graphiti_core.embedder.gemini import GeminiEmbedder, GeminiEmbedderConfig
from graphiti_core.llm_client.config import LLMConfig
from graphiti_core.llm_client.gemini_client import GeminiClient
from graphiti_core.llm_client.gliner2_client import GLiNER2Client
from graphiti_core.nodes import EpisodeType
#################################################
# CUSTOM ENTITY TYPES
#################################################
# Define Pydantic models for entity classification.
# GLiNER2 uses the class docstrings as label
# descriptions for improved extraction accuracy.
# The LLM client uses these for edge extraction
# and summarization.
#################################################
class Person(BaseModel):
    """A human person, real or fictional."""
    # NOTE: the class docstring doubles as the GLiNER2 label description
    # (see the banner above) — keep it stable for extraction quality.
    occupation: str | None = Field(None, description='Professional role or job title')
    political_party: str | None = Field(None, description='Political party affiliation')
class Organization(BaseModel):
    """An organization such as a company, government agency, university, or political party."""
    # Docstring is used as the GLiNER2 label description — keep it stable.
    org_type: str | None = Field(
        None, description='Type of organization (e.g., bank, university, government agency)'
    )
class Location(BaseModel):
    """A geographic location such as a city, state, or country."""
    # Docstring is used as the GLiNER2 label description — keep it stable.
    location_type: str | None = Field(
        None, description='Type of location (e.g., city, state, county)'
    )
class Initiative(BaseModel):
    """A program, policy, initiative, or legal action."""
    # Docstring is used as the GLiNER2 label description — keep it stable.
    description: str | None = Field(None, description='Brief description of the initiative')
# Registry mapping label names to their Pydantic schemas; passed as
# entity_types=... to graphiti.add_episode() in main() below.
entity_types: dict[str, type[BaseModel]] = {
    'Person': Person,
    'Organization': Organization,
    'Location': Location,
    'Initiative': Initiative,
}
#################################################
# CONFIGURATION
#################################################
# GLiNER2 is a lightweight extraction model
# (205M-340M params) that runs locally on CPU.
# It handles entity extraction (NER), while an
# OpenAI client handles edge/fact extraction,
# deduplication, summarization, and reasoning.
#################################################
# Configure logging
logging.basicConfig(
    level=INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    datefmt='%Y-%m-%d %H:%M:%S',
)
logger = logging.getLogger(__name__)
# Pull NEO4J_*/GLINER2_* settings from a local .env file if present.
load_dotenv()
# Neo4j connection parameters
neo4j_uri = os.environ.get('NEO4J_URI')
neo4j_user = os.environ.get('NEO4J_USER')
neo4j_password = os.environ.get('NEO4J_PASSWORD')
# Fail fast at import time rather than on first query.
if not neo4j_uri or not neo4j_user or not neo4j_password:
    raise ValueError('NEO4J_URI, NEO4J_USER, and NEO4J_PASSWORD must be set')
# GLiNER2 model configuration
gliner2_model = os.environ.get('GLINER2_MODEL', 'fastino/gliner2-large-v1')
async def main():
    """Run the end-to-end GLiNER2 + Graphiti demo.

    Builds the hybrid client stack, ingests multilingual episodes about
    California politics, runs a few searches, prints extraction latency
    stats, and always closes the Graphiti connection.
    """
    #################################################
    # INITIALIZATION
    #################################################
    # Set up a hybrid LLM client: GLiNER2 handles
    # entity extraction locally using custom entity
    # types as labels, while OpenAI handles edge/fact
    # extraction, deduplication, and summarization.
    #################################################
    # Create the Gemini client for reasoning tasks
    gemini_client = GeminiClient(
        config=LLMConfig(
            api_key=os.environ.get('GOOGLE_API_KEY'),
            model='gemini-2.5-flash-lite',
            small_model='gemini-2.5-flash-lite',
        ),
    )
    # Create the GLiNER2 hybrid client
    gliner2_client = GLiNER2Client(
        config=LLMConfig(model=gliner2_model),
        llm_client=gemini_client,
        threshold=0.7,
    )
    # Create the Gemini embedder
    gemini_embedder = GeminiEmbedder(
        config=GeminiEmbedderConfig(
            api_key=os.environ.get('GOOGLE_API_KEY'),
            embedding_model='gemini-embedding-001',
        ),
    )
    # Initialize Graphiti with the GLiNER2 hybrid client and Gemini embedder
    graphiti = Graphiti(
        neo4j_uri,
        neo4j_user,
        neo4j_password,
        llm_client=gliner2_client,
        embedder=gemini_embedder,
    )
    try:
        #################################################
        # ADDING EPISODES
        #################################################
        # Entity extraction from these episodes will be
        # handled by GLiNER2 locally using the custom
        # entity types as labels. Edge/fact extraction,
        # deduplication, and summarization are delegated
        # to OpenAI.
        #################################################
        episodes = [
            # English: detailed political biography
            {
                'content': (
                    'Kamala Harris is the Attorney General of California. She was previously '
                    'the district attorney for San Francisco. Harris graduated from Howard '
                    'University in 1986 and earned her law degree from the University of '
                    'California, Hastings College of the Law in 1989. Before entering politics, '
                    'she worked as a deputy district attorney in Alameda County under District '
                    'Attorney John Orlovsky. In 2003, she defeated incumbent Terence Hallinan '
                    'to become San Francisco District Attorney, making her the first woman and '
                    'first African American to hold the position.'
                ),
                'type': EpisodeType.text,
                'description': 'podcast transcript',
            },
            {
                'content': (
                    'As AG, Harris was in office from January 3, 2011 to January 3, 2017. '
                    'During her tenure she launched the OpenJustice initiative, a data platform '
                    'for criminal justice statistics across California. She also led a $25 billion '
                    'national mortgage settlement against Bank of America, JPMorgan Chase, Wells '
                    'Fargo, Citigroup, and Ally Financial on behalf of homeowners affected by '
                    'the foreclosure crisis.'
                ),
                'type': EpisodeType.text,
                'description': 'podcast transcript',
            },
            # Spanish: same entities (Kamala Harris, California, San Francisco)
            {
                'content': (
                    'Kamala Harris fue la Fiscal General de California entre 2011 y 2017. '
                    'Anteriormente se desempeñó como fiscal de distrito de San Francisco. '
                    'Harris es graduada de la Universidad Howard y obtuvo su título de abogada '
                    'en la Facultad de Derecho Hastings de la Universidad de California. Durante '
                    'su mandato como Fiscal General, impulsó reformas en el sistema de justicia '
                    'penal del estado.'
                ),
                'type': EpisodeType.text,
                'description': 'artículo de noticias',
            },
            # French: same entities (Kamala Harris, California, San Francisco)
            {
                'content': (
                    'Kamala Harris a été procureure générale de Californie de 2011 à 2017. '
                    'Avant cela, elle a occupé le poste de procureure du district de '
                    'San Francisco. Elle est diplômée de l\'Université Howard et a obtenu '
                    'son diplôme de droit au Hastings College of the Law de l\'Université de '
                    'Californie. En tant que procureure générale, elle a négocié un accord '
                    'national de 25 milliards de dollars avec les grandes banques américaines.'
                ),
                'type': EpisodeType.text,
                'description': 'article de presse',
            },
            # JSON: structured political metadata
            {
                'content': {
                    'name': 'Gavin Newsom',
                    'position': 'Governor',
                    'state': 'California',
                    'previous_role': 'Lieutenant Governor',
                    'previous_location': 'San Francisco',
                    'party': 'Democratic Party',
                    'took_office': '2019-01-07',
                    'predecessor': 'Jerry Brown',
                },
                'type': EpisodeType.json,
                'description': 'political leadership metadata',
            },
            # Portuguese: overlapping entities (California, San Francisco, Gavin Newsom)
            {
                'content': (
                    'Gavin Newsom é o governador da Califórnia desde janeiro de 2019. '
                    'Antes disso, ele foi prefeito de San Francisco de 2004 a 2011 e '
                    'vice-governador da Califórnia de 2011 a 2019. Newsom é membro do '
                    'Partido Democrata e tem promovido políticas progressistas em áreas '
                    'como mudanças climáticas, imigração e reforma da justiça criminal.'
                ),
                'type': EpisodeType.text,
                'description': 'perfil político',
            },
        ]
        for i, episode in enumerate(episodes):
            # JSON episode bodies must be serialized to a string first.
            result = await graphiti.add_episode(
                name=f'California Politics {i}',
                episode_body=(
                    episode['content']
                    if isinstance(episode['content'], str)
                    else json.dumps(episode['content'])
                ),
                source=episode['type'],
                source_description=episode['description'],
                reference_time=datetime.now(timezone.utc),
                entity_types=entity_types,
            )
            print(f'\n--- Episode: California Politics {i} ({episode["type"].value}) ---')
            if result.nodes:
                print(f'  Entities ({len(result.nodes)}):')
                for node in result.nodes:
                    labels_str = ', '.join(node.labels) if node.labels else 'Entity'
                    print(f'    - {node.name} [{labels_str}]')
                    if node.summary:
                        print(f'      Summary: {node.summary}')
                    if node.attributes:
                        print(f'      Attributes: {node.attributes}')
            if result.edges:
                print(f'  Edges ({len(result.edges)}):')
                for edge in result.edges:
                    temporal = ''
                    if edge.valid_at:
                        temporal += f' (valid: {edge.valid_at.isoformat()})'
                    if edge.invalid_at:
                        temporal += f' (invalid: {edge.invalid_at.isoformat()})'
                    print(f'    - [{edge.name}] {edge.fact}{temporal}')
        #################################################
        # SEARCH
        #################################################
        queries = [
            'Who was the California Attorney General?',
            'What banks were involved in the mortgage settlement?',
            'What is the relationship between Kamala Harris and San Francisco?',
        ]
        for query in queries:
            print(f"\nSearching for: '{query}'")
            results = await graphiti.search(query)
            print('Results:')
            for result in results:
                print(f'  Fact: {result.fact}')
                if hasattr(result, 'valid_at') and result.valid_at:
                    print(f'  Valid from: {result.valid_at}')
                if hasattr(result, 'invalid_at') and result.invalid_at:
                    print(f'  Valid until: {result.invalid_at}')
                print('  ---')
        #################################################
        # ENTITY EXTRACTION LATENCY
        #################################################
        # Latencies (ms) recorded by the GLiNER2 client per extraction call.
        latencies = gliner2_client.extraction_latencies
        if latencies:
            print(f'\nGLiNER2 entity extraction latency ({len(latencies)} calls):')
            print(f'  Mean:  {sum(latencies) / len(latencies):.1f} ms')
            print(f'  Min:   {min(latencies):.1f} ms')
            print(f'  Max:   {max(latencies):.1f} ms')
            print(f'  Total: {sum(latencies):.1f} ms')
    finally:
        # Always release the Neo4j driver, even if ingestion/search failed.
        await graphiti.close()
        print('\nConnection closed')
if __name__ == '__main__':
    # Run the demo end-to-end when executed as a script.
    asyncio.run(main())
| {
"repo_id": "getzep/graphiti",
"file_path": "examples/gliner2/gliner2_neo4j.py",
"license": "Apache License 2.0",
"lines": 279,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
getzep/graphiti:graphiti_core/llm_client/gliner2_client.py | """
Copyright 2024, Zep Software, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import ast
import asyncio
import json
import logging
import re
import typing
from time import perf_counter
from typing import TYPE_CHECKING
from pydantic import BaseModel
from ..prompts.models import Message
from .client import LLMClient
from .config import DEFAULT_MAX_TOKENS, LLMConfig, ModelSize
from .errors import RateLimitError
if TYPE_CHECKING:
from gliner2 import GLiNER2 # type: ignore[import-untyped]
else:
try:
from gliner2 import GLiNER2 # type: ignore[import-untyped]
except ImportError:
raise ImportError(
'gliner2 is required for GLiNER2Client. '
'Install it with: pip install graphiti-core[gliner2]'
) from None
logger = logging.getLogger(__name__)
DEFAULT_MODEL = 'fastino/gliner2-base-v1'
DEFAULT_THRESHOLD = 0.5
# Response model that GLiNER2 handles natively
_ENTITY_EXTRACTION_MODEL = 'ExtractedEntities'
class GLiNER2Client(LLMClient):
    """LLM client that uses GLiNER2 for entity extraction.

    GLiNER2 is a lightweight extraction model (205M-340M params) that handles
    named entity recognition locally on CPU. All other operations (edge/relation
    extraction, deduplication, summarization, etc.) are delegated to the
    required llm_client.

    Note: When using local models (no base_url), initialization loads model
    weights synchronously. Create this client before entering the async
    event loop (e.g., before ``asyncio.run()``).
    """

    def __init__(
        self,
        config: LLMConfig | None = None,
        cache: bool = False,
        threshold: float = DEFAULT_THRESHOLD,
        include_confidence: bool = False,
        llm_client: LLMClient | None = None,
    ) -> None:
        """Build the client, loading GLiNER2 models eagerly.

        Args:
            config: LLM configuration. ``model``/``small_model`` name GLiNER2
                checkpoints; a non-empty ``base_url`` switches to API mode.
                Defaults to ``LLMConfig()``.
            cache: Passed through to the LLMClient base (response caching).
            threshold: Confidence threshold forwarded to GLiNER2 extraction.
            include_confidence: When True, GLiNER2 returns per-entity dicts
                with confidence scores instead of bare strings.
            llm_client: Required delegate for every non-extraction operation.

        Raises:
            ValueError: If ``llm_client`` is None.
        """
        if llm_client is None:
            raise ValueError(
                'llm_client is required. GLiNER2 cannot handle all operations '
                '(deduplication, summarization, etc.) and must delegate to a '
                'general-purpose LLM client.'
            )
        if config is None:
            config = LLMConfig()
        super().__init__(config, cache)
        self.threshold = threshold
        self.include_confidence = include_confidence
        self.llm_client = llm_client
        # Wall-clock extraction times in milliseconds, appended per call.
        # NOTE(review): grows without bound over the client's lifetime.
        self.extraction_latencies: list[float] = []
        model_id = config.model or DEFAULT_MODEL
        small_model_id = config.small_model or model_id
        if config.base_url:
            # API mode: a single remote model serves both medium and small.
            logger.info('Initializing GLiNER2 in API mode: %s', config.base_url)
            self._model = GLiNER2.from_api(
                api_key=config.api_key or '',
                api_base_url=config.base_url,
            )
            self._small_model = self._model
        else:
            # Local mode: weights load synchronously here (can block startup).
            logger.info('Loading GLiNER2 model: %s', model_id)
            self._model = GLiNER2.from_pretrained(model_id)
            if small_model_id != model_id:
                logger.info('Loading GLiNER2 small model: %s', small_model_id)
                self._small_model = GLiNER2.from_pretrained(small_model_id)
            else:
                # Avoid loading the same checkpoint twice.
                self._small_model = self._model

    def _get_model_for_size(self, model_size: ModelSize) -> typing.Any:
        """Return the GLiNER2 instance matching the requested model size."""
        if model_size == ModelSize.small:
            return self._small_model
        return self._model

    def _get_provider_type(self) -> str:
        """Provider identifier used for tracing/telemetry labels."""
        return 'gliner2'

    # ── Message parsing helpers ──────────────────────────────────────

    @staticmethod
    def _extract_text_from_messages(messages: list[Message]) -> str:
        """Extract the raw text content from the message list for GLiNER2 processing.

        Prefers the content of a known XML-style tag inside the user message;
        falls back to the whole message content when no tag matches.
        """
        # With multiple messages the last one is the user prompt; with a single
        # message there is only that one to parse.
        user_content = messages[-1].content if len(messages) > 1 else messages[0].content
        # Try known XML tags in priority order
        for tag in [
            'CURRENT MESSAGE',
            'CURRENT_MESSAGE',
            'TEXT',
            'JSON',
        ]:
            pattern = rf'<{re.escape(tag)}>\s*(.*?)\s*</{re.escape(tag)}>'
            match = re.search(pattern, user_content, re.DOTALL)
            if match:
                return match.group(1).strip()
        # Fallback: return the full user content
        return user_content

    @staticmethod
    def _extract_entity_labels(messages: list[Message]) -> tuple[dict[str, str], dict[str, int]]:
        """Extract entity type labels and id mappings from the message.

        Returns:
            Tuple of (labels_dict, label_to_id) where labels_dict maps
            entity_type_name → entity_type_description and label_to_id maps
            entity_type_name → entity_type_id.
        """
        user_content = messages[-1].content if len(messages) > 1 else messages[0].content
        match = re.search(
            r'<ENTITY TYPES>\s*(.*?)\s*</ENTITY TYPES>', user_content, re.DOTALL
        )
        if match:
            try:
                raw = match.group(1)
                # Prompt templates interpolate Python list[dict] directly,
                # producing Python repr (single quotes, None) rather than JSON.
                try:
                    entity_types = json.loads(raw)
                except json.JSONDecodeError:
                    entity_types = ast.literal_eval(raw)
                labels_dict: dict[str, str] = {}
                label_to_id: dict[str, int] = {}
                for et in entity_types:
                    name = et['entity_type_name']
                    labels_dict[name] = et.get('entity_type_description') or ''
                    label_to_id[name] = et['entity_type_id']
                return labels_dict, label_to_id
            except (json.JSONDecodeError, KeyError, ValueError, SyntaxError):
                # Fall through to the catch-all default below on parse failure.
                logger.warning('Failed to parse <ENTITY TYPES> from message')
        # Default when the tag is absent or unparseable: one generic type, id 0.
        return {'Entity': 'General entity'}, {'Entity': 0}

    # ── Extraction handlers ──────────────────────────────────────────

    async def _handle_entity_extraction(
        self,
        model: typing.Any,
        text: str,
        messages: list[Message],
    ) -> dict[str, typing.Any]:
        """Handle entity extraction using GLiNER2.

        Maps GLiNER2 output format to Graphiti's ExtractedEntities format.
        """
        labels_dict, label_to_id = self._extract_entity_labels(messages)
        # GLiNER2 inference is synchronous; run it off the event-loop thread.
        result = await asyncio.to_thread(
            model.extract_entities,
            text,
            labels_dict,
            threshold=self.threshold,
            include_confidence=self.include_confidence,
        )
        extracted_entities: list[dict[str, typing.Any]] = []
        entities_dict = result.get('entities', {})
        for entity_type, entity_items in entities_dict.items():
            # Unknown types map to id 0 (the catch-all 'Entity' id).
            entity_type_id = label_to_id.get(entity_type, 0)
            for item in entity_items:
                # GLiNER2 returns strings or dicts (when include_confidence=True)
                name = item.get('text', '') if isinstance(item, dict) else str(item)
                if name:
                    extracted_entities.append({
                        'name': name,
                        'entity_type_id': entity_type_id,
                    })
        return {'extracted_entities': extracted_entities}

    # ── Core dispatch ────────────────────────────────────────────────

    def _is_gliner2_operation(self, response_model: type[BaseModel] | None) -> bool:
        """Determine if the response_model maps to a GLiNER2-native operation."""
        if response_model is None:
            return False
        # Matched by class name, not identity, so any 'ExtractedEntities'
        # pydantic model routes to GLiNER2.
        return response_model.__name__ == _ENTITY_EXTRACTION_MODEL

    async def _generate_response(
        self,
        messages: list[Message],
        response_model: type[BaseModel] | None = None,
        max_tokens: int = DEFAULT_MAX_TOKENS,
        model_size: ModelSize = ModelSize.medium,
    ) -> dict[str, typing.Any]:
        """Run GLiNER2 entity extraction for the given messages.

        Records per-call latency into ``extraction_latencies``. Invoked via
        the base-class retry wrapper from ``generate_response``.

        Raises:
            RateLimitError: When the error message looks like an API rate
                limit ('rate limit' or '429' in the text).
        """
        model = self._get_model_for_size(model_size)
        text = self._extract_text_from_messages(messages)
        if not text:
            # No usable input → empty result rather than an error.
            logger.warning('No text extracted from messages for GLiNER2 processing')
            return {'extracted_entities': []}
        try:
            t0 = perf_counter()
            result = await self._handle_entity_extraction(model, text, messages)
            latency_ms = (perf_counter() - t0) * 1000
            self.extraction_latencies.append(latency_ms)
            logger.info('GLiNER2 entity extraction: %.1f ms', latency_ms)
            return result
        except Exception as e:
            # Best-effort classification of API-style failures by message text.
            error_msg = str(e).lower()
            if 'rate limit' in error_msg or '429' in error_msg:
                raise RateLimitError(f'GLiNER2 API rate limit: {e}') from e
            if 'authentication' in error_msg or 'unauthorized' in error_msg:
                raise
            logger.error('GLiNER2 extraction error: %s', e)
            raise

    async def generate_response(
        self,
        messages: list[Message],
        response_model: type[BaseModel] | None = None,
        max_tokens: int | None = None,
        model_size: ModelSize = ModelSize.medium,
        group_id: str | None = None,
        prompt_name: str | None = None,
    ) -> dict[str, typing.Any]:
        """Route entity-extraction requests to GLiNER2; delegate the rest.

        Extraction requests go through tracing, the optional response cache,
        and the base-class retry wrapper. All other requests are forwarded
        unchanged to the wrapped ``llm_client``.
        """
        # Delegate non-extraction operations to the LLM client
        if not self._is_gliner2_operation(response_model):
            return await self.llm_client.generate_response(
                messages,
                response_model=response_model,
                max_tokens=max_tokens,
                model_size=model_size,
                group_id=group_id,
                prompt_name=prompt_name,
            )
        if max_tokens is None:
            max_tokens = self.max_tokens
        # Clean input (still useful for the text we extract)
        # NOTE: mutates message.content in place via the base-class cleaner.
        for message in messages:
            message.content = self._clean_input(message.content)
        with self.tracer.start_span('llm.generate') as span:
            attributes: dict[str, typing.Any] = {
                'llm.provider': 'gliner2',
                'model.size': model_size.value,
                'cache.enabled': self.cache_enabled,
            }
            if prompt_name:
                attributes['prompt.name'] = prompt_name
            span.add_attributes(attributes)
            # Check cache
            if self.cache_enabled and self.cache_dir is not None:
                cache_key = self._get_cache_key(messages)
                cached_response = self.cache_dir.get(cache_key)
                if cached_response is not None:
                    logger.debug('Cache hit for %s', cache_key)
                    span.add_attributes({'cache.hit': True})
                    return cached_response
                span.add_attributes({'cache.hit': False})
            try:
                response = await self._generate_response_with_retry(
                    messages, response_model, max_tokens, model_size
                )
                # Approximate token usage (GLiNER2 doesn't report actual tokens)
                # Heuristic: roughly 4 characters per token.
                text = self._extract_text_from_messages(messages)
                input_tokens = len(text) // 4
                output_tokens = len(json.dumps(response)) // 4
                self.token_tracker.record(
                    prompt_name or 'unknown',
                    input_tokens,
                    output_tokens,
                )
            except Exception as e:
                span.set_status('error', str(e))
                span.record_exception(e)
                raise
            # Cache response
            if self.cache_enabled and self.cache_dir is not None:
                cache_key = self._get_cache_key(messages)
                self.cache_dir.set(cache_key, response)
            return response
| {
"repo_id": "getzep/graphiti",
"file_path": "graphiti_core/llm_client/gliner2_client.py",
"license": "Apache License 2.0",
"lines": 271,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
getzep/graphiti:graphiti_core/llm_client/cache.py | """
Copyright 2024, Zep Software, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import contextlib
import json
import logging
import os
import sqlite3
import threading
import typing
logger = logging.getLogger(__name__)
class LLMCache:
"""Simple SQLite + JSON cache for LLM responses.
Replaces diskcache to avoid unsafe pickle deserialization (CVE in diskcache <= 5.6.3).
Only stores JSON-serializable data.
"""
def __init__(self, directory: str):
os.makedirs(directory, exist_ok=True)
db_path = os.path.join(directory, 'cache.db')
self._conn = sqlite3.connect(db_path, check_same_thread=False)
self._conn.execute('CREATE TABLE IF NOT EXISTS cache (key TEXT PRIMARY KEY, value TEXT)')
self._conn.commit()
def get(self, key: str) -> dict[str, typing.Any] | None:
row = self._conn.execute('SELECT value FROM cache WHERE key = ?', (key,)).fetchone()
if row is None:
return None
try:
return json.loads(row[0])
except json.JSONDecodeError:
logger.warning(f'Corrupted cache entry for key {key}, ignoring')
return None
def set(self, key: str, value: dict[str, typing.Any]) -> None:
try:
serialized = json.dumps(value)
except TypeError:
logger.warning(f'Non-JSON-serializable cache value for key {key}, skipping')
return
self._conn.execute(
'INSERT OR REPLACE INTO cache (key, value) VALUES (?, ?)',
(key, serialized),
)
self._conn.commit()
def close(self) -> None:
self._conn.close()
def __del__(self) -> None:
with contextlib.suppress(Exception):
self._conn.close()
| {
"repo_id": "getzep/graphiti",
"file_path": "graphiti_core/llm_client/cache.py",
"license": "Apache License 2.0",
"lines": 55,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
getzep/graphiti:tests/llm_client/test_cache.py | """
Copyright 2024, Zep Software, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import pytest
from graphiti_core.llm_client.cache import LLMCache
@pytest.fixture
def cache(tmp_path):
    """Provide an LLMCache backed by a per-test temporary directory."""
    instance = LLMCache(str(tmp_path / 'test_cache'))
    yield instance
    instance.close()
class TestLLMCache:
    def test_get_missing_key_returns_none(self, cache):
        """A key that was never stored yields None."""
        assert cache.get('nonexistent') is None

    def test_set_and_get(self, cache):
        """A stored value comes back unchanged."""
        payload = {'content': 'hello', 'tokens': 42}
        cache.set('key1', payload)
        assert cache.get('key1') == payload

    def test_set_overwrites_existing(self, cache):
        """Re-setting a key replaces the earlier value."""
        for revision in (1, 2):
            cache.set('key1', {'version': revision})
        assert cache.get('key1') == {'version': 2}

    def test_multiple_keys(self, cache):
        """Distinct keys keep distinct values."""
        expected = {'a': {'val': 1}, 'b': {'val': 2}, 'c': {'val': 3}}
        for key, payload in expected.items():
            cache.set(key, payload)
        for key, payload in expected.items():
            assert cache.get(key) == payload

    def test_complex_nested_value(self, cache):
        """Deeply nested JSON structures survive a round-trip."""
        payload = {
            'choices': [{'message': {'role': 'assistant', 'content': 'test'}}],
            'usage': {'prompt_tokens': 10, 'completion_tokens': 5},
            'nested': {'a': [1, 2, 3], 'b': None, 'c': True},
        }
        cache.set('complex', payload)
        assert cache.get('complex') == payload

    def test_non_serializable_value_is_skipped(self, cache):
        """Values json cannot encode are dropped instead of stored."""
        cache.set('bad', {'func': lambda x: x})  # type: ignore
        assert cache.get('bad') is None

    def test_corrupted_entry_returns_none(self, cache):
        """A row holding invalid JSON is treated as a miss."""
        # Bypass the public API and write garbage straight into SQLite.
        cache._conn.execute(
            'INSERT OR REPLACE INTO cache (key, value) VALUES (?, ?)',
            ('corrupt', 'not valid json{{{'),
        )
        cache._conn.commit()
        assert cache.get('corrupt') is None

    def test_creates_directory(self, tmp_path):
        """Missing parent directories are created on construction."""
        cache_dir = str(tmp_path / 'nested' / 'dir' / 'cache')
        instance = LLMCache(cache_dir)
        try:
            assert os.path.isdir(cache_dir)
            assert os.path.isfile(os.path.join(cache_dir, 'cache.db'))
        finally:
            instance.close()

    def test_persistence_across_instances(self, tmp_path):
        """Data written by one instance is readable by a later one."""
        cache_dir = str(tmp_path / 'persist_cache')
        writer = LLMCache(cache_dir)
        writer.set('persist_key', {'data': 'survives'})
        writer.close()
        reader = LLMCache(cache_dir)
        try:
            assert reader.get('persist_key') == {'data': 'survives'}
        finally:
            reader.close()

    def test_close_and_del(self, tmp_path):
        """close() followed by __del__ must not raise."""
        instance = LLMCache(str(tmp_path / 'close_test'))
        instance.close()
        # A second close via __del__ should be harmless.
        instance.__del__()
| {
"repo_id": "getzep/graphiti",
"file_path": "tests/llm_client/test_cache.py",
"license": "Apache License 2.0",
"lines": 91,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
getzep/graphiti:graphiti_core/driver/kuzu/operations/community_edge_ops.py | """
Copyright 2024, Zep Software, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import logging
from typing import Any
from graphiti_core.driver.driver import GraphProvider
from graphiti_core.driver.operations.community_edge_ops import CommunityEdgeOperations
from graphiti_core.driver.query_executor import QueryExecutor, Transaction
from graphiti_core.edges import CommunityEdge
from graphiti_core.errors import EdgeNotFoundError
from graphiti_core.helpers import parse_db_date
from graphiti_core.models.edges.edge_db_queries import (
COMMUNITY_EDGE_RETURN,
get_community_edge_save_query,
)
logger = logging.getLogger(__name__)
def _community_edge_from_record(record: Any) -> CommunityEdge:
    """Build a CommunityEdge model from a raw database record."""
    fields = {
        'uuid': record['uuid'],
        'group_id': record['group_id'],
        'source_node_uuid': record['source_node_uuid'],
        'target_node_uuid': record['target_node_uuid'],
        'created_at': parse_db_date(record['created_at']),  # type: ignore[arg-type]
    }
    return CommunityEdge(**fields)
class KuzuCommunityEdgeOperations(CommunityEdgeOperations):
    """CommunityEdge (HAS_MEMBER) persistence operations for the Kuzu provider."""

    async def save(
        self,
        executor: QueryExecutor,
        edge: CommunityEdge,
        tx: Transaction | None = None,
    ) -> None:
        """Upsert a community membership edge, inside *tx* when provided."""
        query = get_community_edge_save_query(GraphProvider.KUZU)
        params: dict[str, Any] = {
            'community_uuid': edge.source_node_uuid,
            'entity_uuid': edge.target_node_uuid,
            'uuid': edge.uuid,
            'group_id': edge.group_id,
            'created_at': edge.created_at,
        }
        if tx is not None:
            await tx.run(query, **params)
        else:
            await executor.execute_query(query, **params)
        # Lazy %-args defer string interpolation unless DEBUG is enabled.
        logger.debug('Saved Edge to Graph: %s', edge.uuid)

    async def delete(
        self,
        executor: QueryExecutor,
        edge: CommunityEdge,
        tx: Transaction | None = None,
    ) -> None:
        """Delete a single HAS_MEMBER edge by uuid."""
        query = """
        MATCH (n:Community)-[e:HAS_MEMBER {uuid: $uuid}]->(m)
        DELETE e
        """
        if tx is not None:
            await tx.run(query, uuid=edge.uuid)
        else:
            await executor.execute_query(query, uuid=edge.uuid)
        logger.debug('Deleted Edge: %s', edge.uuid)

    async def delete_by_uuids(
        self,
        executor: QueryExecutor,
        uuids: list[str],
        tx: Transaction | None = None,
    ) -> None:
        """Delete every HAS_MEMBER edge whose uuid is in *uuids*."""
        if not uuids:
            # Nothing to delete; skip the round-trip (mirrors entity edge ops).
            return
        query = """
        MATCH (n:Community)-[e:HAS_MEMBER]->(m)
        WHERE e.uuid IN $uuids
        DELETE e
        """
        if tx is not None:
            await tx.run(query, uuids=uuids)
        else:
            await executor.execute_query(query, uuids=uuids)

    async def get_by_uuid(
        self,
        executor: QueryExecutor,
        uuid: str,
    ) -> CommunityEdge:
        """Fetch one community edge by uuid.

        Raises:
            EdgeNotFoundError: If no edge with *uuid* exists.
        """
        query = (
            """
            MATCH (n:Community)-[e:HAS_MEMBER {uuid: $uuid}]->(m)
            RETURN
            """
            + COMMUNITY_EDGE_RETURN
        )
        records, _, _ = await executor.execute_query(query, uuid=uuid)
        edges = [_community_edge_from_record(r) for r in records]
        if len(edges) == 0:
            raise EdgeNotFoundError(uuid)
        return edges[0]

    async def get_by_uuids(
        self,
        executor: QueryExecutor,
        uuids: list[str],
    ) -> list[CommunityEdge]:
        """Fetch community edges for *uuids*; unknown uuids are skipped."""
        if not uuids:
            # Consistent with KuzuEntityEdgeOperations.get_by_uuids.
            return []
        query = (
            """
            MATCH (n:Community)-[e:HAS_MEMBER]->(m)
            WHERE e.uuid IN $uuids
            RETURN
            """
            + COMMUNITY_EDGE_RETURN
        )
        records, _, _ = await executor.execute_query(query, uuids=uuids)
        return [_community_edge_from_record(r) for r in records]

    async def get_by_group_ids(
        self,
        executor: QueryExecutor,
        group_ids: list[str],
        limit: int | None = None,
        uuid_cursor: str | None = None,
    ) -> list[CommunityEdge]:
        """Page community edges by group, descending uuid order.

        Args:
            group_ids: Groups to include.
            limit: Maximum rows to return (None for unlimited).
            uuid_cursor: Exclusive pagination cursor; only edges with
                uuid < uuid_cursor are returned.
        """
        cursor_clause = 'AND e.uuid < $uuid' if uuid_cursor else ''
        limit_clause = 'LIMIT $limit' if limit is not None else ''
        query = (
            """
            MATCH (n:Community)-[e:HAS_MEMBER]->(m)
            WHERE e.group_id IN $group_ids
            """
            + cursor_clause
            + """
            RETURN
            """
            + COMMUNITY_EDGE_RETURN
            + """
            ORDER BY e.uuid DESC
            """
            + limit_clause
        )
        records, _, _ = await executor.execute_query(
            query,
            group_ids=group_ids,
            uuid=uuid_cursor,
            limit=limit,
        )
        return [_community_edge_from_record(r) for r in records]
| {
"repo_id": "getzep/graphiti",
"file_path": "graphiti_core/driver/kuzu/operations/community_edge_ops.py",
"license": "Apache License 2.0",
"lines": 146,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
getzep/graphiti:graphiti_core/driver/kuzu/operations/community_node_ops.py | """
Copyright 2024, Zep Software, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import logging
from typing import Any
from graphiti_core.driver.driver import GraphProvider
from graphiti_core.driver.operations.community_node_ops import CommunityNodeOperations
from graphiti_core.driver.query_executor import QueryExecutor, Transaction
from graphiti_core.driver.record_parsers import community_node_from_record
from graphiti_core.errors import NodeNotFoundError
from graphiti_core.models.nodes.node_db_queries import (
COMMUNITY_NODE_RETURN,
get_community_node_save_query,
)
from graphiti_core.nodes import CommunityNode
logger = logging.getLogger(__name__)
class KuzuCommunityNodeOperations(CommunityNodeOperations):
    """Community-node persistence operations for the Kuzu graph provider."""

    async def save(
        self,
        executor: QueryExecutor,
        node: CommunityNode,
        tx: Transaction | None = None,
    ) -> None:
        """Upsert a community node, running inside *tx* when provided."""
        query = get_community_node_save_query(GraphProvider.KUZU)
        params: dict[str, Any] = {
            'uuid': node.uuid,
            'name': node.name,
            'group_id': node.group_id,
            'summary': node.summary,
            'name_embedding': node.name_embedding,
            'created_at': node.created_at,
        }
        if tx is not None:
            await tx.run(query, **params)
        else:
            await executor.execute_query(query, **params)
        # Lazy %-args defer string interpolation unless DEBUG is enabled.
        logger.debug('Saved Community Node to Graph: %s', node.uuid)

    async def save_bulk(
        self,
        executor: QueryExecutor,
        nodes: list[CommunityNode],
        tx: Transaction | None = None,
        batch_size: int = 100,
    ) -> None:
        """Save nodes one at a time.

        Kuzu doesn't support UNWIND, so there is no real batching and
        *batch_size* is accepted only for interface parity.
        """
        for node in nodes:
            await self.save(executor, node, tx=tx)

    async def delete(
        self,
        executor: QueryExecutor,
        node: CommunityNode,
        tx: Transaction | None = None,
    ) -> None:
        """Detach-delete a single community node by uuid."""
        query = """
        MATCH (n:Community {uuid: $uuid})
        DETACH DELETE n
        """
        if tx is not None:
            await tx.run(query, uuid=node.uuid)
        else:
            await executor.execute_query(query, uuid=node.uuid)
        logger.debug('Deleted Node: %s', node.uuid)

    async def delete_by_group_id(
        self,
        executor: QueryExecutor,
        group_id: str,
        tx: Transaction | None = None,
        batch_size: int = 100,
    ) -> None:
        """Detach-delete every community node in *group_id*.

        Kuzu doesn't support IN TRANSACTIONS OF, so this is one simple
        delete and *batch_size* is unused.
        """
        query = """
        MATCH (n:Community {group_id: $group_id})
        DETACH DELETE n
        """
        if tx is not None:
            await tx.run(query, group_id=group_id)
        else:
            await executor.execute_query(query, group_id=group_id)

    async def delete_by_uuids(
        self,
        executor: QueryExecutor,
        uuids: list[str],
        tx: Transaction | None = None,
        batch_size: int = 100,
    ) -> None:
        """Detach-delete community nodes whose uuid is in *uuids*."""
        if not uuids:
            # Nothing to delete; avoid an unnecessary round-trip.
            return
        # Kuzu doesn't support IN TRANSACTIONS OF - simple delete
        query = """
        MATCH (n:Community)
        WHERE n.uuid IN $uuids
        DETACH DELETE n
        """
        if tx is not None:
            await tx.run(query, uuids=uuids)
        else:
            await executor.execute_query(query, uuids=uuids)

    async def get_by_uuid(
        self,
        executor: QueryExecutor,
        uuid: str,
    ) -> CommunityNode:
        """Fetch one community node by uuid.

        Raises:
            NodeNotFoundError: If no node with *uuid* exists.
        """
        query = (
            """
            MATCH (c:Community {uuid: $uuid})
            RETURN
            """
            + COMMUNITY_NODE_RETURN
        )
        records, _, _ = await executor.execute_query(query, uuid=uuid)
        nodes = [community_node_from_record(r) for r in records]
        if len(nodes) == 0:
            raise NodeNotFoundError(uuid)
        return nodes[0]

    async def get_by_uuids(
        self,
        executor: QueryExecutor,
        uuids: list[str],
    ) -> list[CommunityNode]:
        """Fetch community nodes for *uuids*; unknown uuids are skipped."""
        if not uuids:
            # Consistent with the entity edge operations: empty in, empty out.
            return []
        query = (
            """
            MATCH (c:Community)
            WHERE c.uuid IN $uuids
            RETURN
            """
            + COMMUNITY_NODE_RETURN
        )
        records, _, _ = await executor.execute_query(query, uuids=uuids)
        return [community_node_from_record(r) for r in records]

    async def get_by_group_ids(
        self,
        executor: QueryExecutor,
        group_ids: list[str],
        limit: int | None = None,
        uuid_cursor: str | None = None,
    ) -> list[CommunityNode]:
        """Page community nodes by group, descending uuid order.

        Args:
            group_ids: Groups to include.
            limit: Maximum rows to return (None for unlimited).
            uuid_cursor: Exclusive pagination cursor; only nodes with
                uuid < uuid_cursor are returned.
        """
        cursor_clause = 'AND c.uuid < $uuid' if uuid_cursor else ''
        limit_clause = 'LIMIT $limit' if limit is not None else ''
        query = (
            """
            MATCH (c:Community)
            WHERE c.group_id IN $group_ids
            """
            + cursor_clause
            + """
            RETURN
            """
            + COMMUNITY_NODE_RETURN
            + """
            ORDER BY c.uuid DESC
            """
            + limit_clause
        )
        records, _, _ = await executor.execute_query(
            query,
            group_ids=group_ids,
            uuid=uuid_cursor,
            limit=limit,
        )
        return [community_node_from_record(r) for r in records]

    async def load_name_embedding(
        self,
        executor: QueryExecutor,
        node: CommunityNode,
    ) -> None:
        """Populate node.name_embedding from the database, in place.

        Raises:
            NodeNotFoundError: If the node no longer exists.
        """
        query = """
        MATCH (c:Community {uuid: $uuid})
        RETURN c.name_embedding AS name_embedding
        """
        records, _, _ = await executor.execute_query(query, uuid=node.uuid)
        if len(records) == 0:
            raise NodeNotFoundError(node.uuid)
        node.name_embedding = records[0]['name_embedding']
| {
"repo_id": "getzep/graphiti",
"file_path": "graphiti_core/driver/kuzu/operations/community_node_ops.py",
"license": "Apache License 2.0",
"lines": 180,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
getzep/graphiti:graphiti_core/driver/kuzu/operations/entity_edge_ops.py | """
Copyright 2024, Zep Software, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import json
import logging
from typing import Any
from graphiti_core.driver.driver import GraphProvider
from graphiti_core.driver.kuzu.operations.record_parsers import parse_kuzu_entity_edge
from graphiti_core.driver.operations.entity_edge_ops import EntityEdgeOperations
from graphiti_core.driver.query_executor import QueryExecutor, Transaction
from graphiti_core.edges import EntityEdge
from graphiti_core.errors import EdgeNotFoundError
from graphiti_core.models.edges.edge_db_queries import (
get_entity_edge_return_query,
get_entity_edge_save_query,
)
logger = logging.getLogger(__name__)
class KuzuEntityEdgeOperations(EntityEdgeOperations):
    """Entity-edge persistence operations for the Kuzu graph provider.

    Kuzu represents an entity edge as an intermediate ``RelatesToNode_``
    node between two ``Entity`` nodes (matched in queries below), with
    dynamic attributes serialized to a JSON string column.
    """

    async def save(
        self,
        executor: QueryExecutor,
        edge: EntityEdge,
        tx: Transaction | None = None,
    ) -> None:
        """Upsert an entity edge, running inside *tx* when provided."""
        params: dict[str, Any] = {
            'uuid': edge.uuid,
            'source_uuid': edge.source_node_uuid,
            'target_uuid': edge.target_node_uuid,
            'name': edge.name,
            'fact': edge.fact,
            'fact_embedding': edge.fact_embedding,
            'group_id': edge.group_id,
            'episodes': edge.episodes,
            'created_at': edge.created_at,
            'expired_at': edge.expired_at,
            'valid_at': edge.valid_at,
            'invalid_at': edge.invalid_at,
            # Attributes are stored as a JSON string column in Kuzu.
            'attributes': json.dumps(edge.attributes or {}),
        }
        query = get_entity_edge_save_query(GraphProvider.KUZU)
        if tx is not None:
            await tx.run(query, **params)
        else:
            await executor.execute_query(query, **params)
        # Lazy %-args defer string interpolation unless DEBUG is enabled.
        logger.debug('Saved Edge to Graph: %s', edge.uuid)

    async def save_bulk(
        self,
        executor: QueryExecutor,
        edges: list[EntityEdge],
        tx: Transaction | None = None,
        batch_size: int = 100,
    ) -> None:
        """Save edges one at a time.

        Kuzu doesn't support UNWIND, so there is no real batching and
        *batch_size* is accepted only for interface parity.
        """
        for edge in edges:
            await self.save(executor, edge, tx=tx)

    async def delete(
        self,
        executor: QueryExecutor,
        edge: EntityEdge,
        tx: Transaction | None = None,
    ) -> None:
        """Detach-delete one edge's intermediate RelatesToNode_ by uuid."""
        query = """
        MATCH (n:Entity)-[:RELATES_TO]->(e:RelatesToNode_ {uuid: $uuid})-[:RELATES_TO]->(m:Entity)
        DETACH DELETE e
        """
        if tx is not None:
            await tx.run(query, uuid=edge.uuid)
        else:
            await executor.execute_query(query, uuid=edge.uuid)
        logger.debug('Deleted Edge: %s', edge.uuid)

    async def delete_by_uuids(
        self,
        executor: QueryExecutor,
        uuids: list[str],
        tx: Transaction | None = None,
    ) -> None:
        """Detach-delete every edge whose uuid is in *uuids*."""
        if not uuids:
            # Nothing to delete; skip the round-trip (mirrors get_by_uuids).
            return
        query = """
        MATCH (n:Entity)-[:RELATES_TO]->(e:RelatesToNode_)-[:RELATES_TO]->(m:Entity)
        WHERE e.uuid IN $uuids
        DETACH DELETE e
        """
        if tx is not None:
            await tx.run(query, uuids=uuids)
        else:
            await executor.execute_query(query, uuids=uuids)

    async def get_by_uuid(
        self,
        executor: QueryExecutor,
        uuid: str,
    ) -> EntityEdge:
        """Fetch one entity edge by uuid.

        Raises:
            EdgeNotFoundError: If no edge with *uuid* exists.
        """
        query = """
        MATCH (n:Entity)-[:RELATES_TO]->(e:RelatesToNode_ {uuid: $uuid})-[:RELATES_TO]->(m:Entity)
        RETURN
        """ + get_entity_edge_return_query(GraphProvider.KUZU)
        records, _, _ = await executor.execute_query(query, uuid=uuid)
        edges = [parse_kuzu_entity_edge(r) for r in records]
        if len(edges) == 0:
            raise EdgeNotFoundError(uuid)
        return edges[0]

    async def get_by_uuids(
        self,
        executor: QueryExecutor,
        uuids: list[str],
    ) -> list[EntityEdge]:
        """Fetch entity edges for *uuids*; unknown uuids are skipped."""
        if not uuids:
            return []
        query = """
        MATCH (n:Entity)-[:RELATES_TO]->(e:RelatesToNode_)-[:RELATES_TO]->(m:Entity)
        WHERE e.uuid IN $uuids
        RETURN
        """ + get_entity_edge_return_query(GraphProvider.KUZU)
        records, _, _ = await executor.execute_query(query, uuids=uuids)
        return [parse_kuzu_entity_edge(r) for r in records]

    async def get_by_group_ids(
        self,
        executor: QueryExecutor,
        group_ids: list[str],
        limit: int | None = None,
        uuid_cursor: str | None = None,
    ) -> list[EntityEdge]:
        """Page entity edges by group, descending uuid order.

        Args:
            group_ids: Groups to include.
            limit: Maximum rows to return (None for unlimited).
            uuid_cursor: Exclusive pagination cursor; only edges with
                uuid < uuid_cursor are returned.
        """
        cursor_clause = 'AND e.uuid < $uuid' if uuid_cursor else ''
        limit_clause = 'LIMIT $limit' if limit is not None else ''
        query = (
            """
            MATCH (n:Entity)-[:RELATES_TO]->(e:RelatesToNode_)-[:RELATES_TO]->(m:Entity)
            WHERE e.group_id IN $group_ids
            """
            + cursor_clause
            + """
            RETURN
            """
            + get_entity_edge_return_query(GraphProvider.KUZU)
            + """
            ORDER BY e.uuid DESC
            """
            + limit_clause
        )
        records, _, _ = await executor.execute_query(
            query,
            group_ids=group_ids,
            uuid=uuid_cursor,
            limit=limit,
        )
        return [parse_kuzu_entity_edge(r) for r in records]

    async def get_between_nodes(
        self,
        executor: QueryExecutor,
        source_node_uuid: str,
        target_node_uuid: str,
    ) -> list[EntityEdge]:
        """Fetch all directed edges from *source_node_uuid* to *target_node_uuid*."""
        query = """
        MATCH (n:Entity {uuid: $source_node_uuid})-[:RELATES_TO]->(e:RelatesToNode_)-[:RELATES_TO]->(m:Entity {uuid: $target_node_uuid})
        RETURN
        """ + get_entity_edge_return_query(GraphProvider.KUZU)
        records, _, _ = await executor.execute_query(
            query,
            source_node_uuid=source_node_uuid,
            target_node_uuid=target_node_uuid,
        )
        return [parse_kuzu_entity_edge(r) for r in records]

    async def get_by_node_uuid(
        self,
        executor: QueryExecutor,
        node_uuid: str,
    ) -> list[EntityEdge]:
        """Fetch all edges whose source node is *node_uuid*."""
        query = """
        MATCH (n:Entity {uuid: $node_uuid})-[:RELATES_TO]->(e:RelatesToNode_)-[:RELATES_TO]->(m:Entity)
        RETURN
        """ + get_entity_edge_return_query(GraphProvider.KUZU)
        records, _, _ = await executor.execute_query(query, node_uuid=node_uuid)
        return [parse_kuzu_entity_edge(r) for r in records]

    async def load_embeddings(
        self,
        executor: QueryExecutor,
        edge: EntityEdge,
    ) -> None:
        """Populate edge.fact_embedding from the database, in place.

        Raises:
            EdgeNotFoundError: If the edge no longer exists.
        """
        query = """
        MATCH (n:Entity)-[:RELATES_TO]->(e:RelatesToNode_ {uuid: $uuid})-[:RELATES_TO]->(m:Entity)
        RETURN e.fact_embedding AS fact_embedding
        """
        records, _, _ = await executor.execute_query(query, uuid=edge.uuid)
        if len(records) == 0:
            raise EdgeNotFoundError(edge.uuid)
        edge.fact_embedding = records[0]['fact_embedding']

    async def load_embeddings_bulk(
        self,
        executor: QueryExecutor,
        edges: list[EntityEdge],
        batch_size: int = 100,
    ) -> None:
        """Populate fact_embedding on every edge found in the database.

        Edges with no matching database row are left unchanged.
        """
        if not edges:
            # Nothing to load; skip the query entirely.
            return
        uuids = [e.uuid for e in edges]
        query = """
        MATCH (n:Entity)-[:RELATES_TO]->(e:RelatesToNode_)-[:RELATES_TO]->(m:Entity)
        WHERE e.uuid IN $edge_uuids
        RETURN DISTINCT e.uuid AS uuid, e.fact_embedding AS fact_embedding
        """
        records, _, _ = await executor.execute_query(query, edge_uuids=uuids)
        embedding_map = {r['uuid']: r['fact_embedding'] for r in records}
        for edge in edges:
            if edge.uuid in embedding_map:
                edge.fact_embedding = embedding_map[edge.uuid]
| {
"repo_id": "getzep/graphiti",
"file_path": "graphiti_core/driver/kuzu/operations/entity_edge_ops.py",
"license": "Apache License 2.0",
"lines": 210,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
getzep/graphiti:graphiti_core/driver/kuzu/operations/entity_node_ops.py | """
Copyright 2024, Zep Software, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import json
import logging
from typing import Any
from graphiti_core.driver.driver import GraphProvider
from graphiti_core.driver.kuzu.operations.record_parsers import parse_kuzu_entity_node
from graphiti_core.driver.operations.entity_node_ops import EntityNodeOperations
from graphiti_core.driver.query_executor import QueryExecutor, Transaction
from graphiti_core.errors import NodeNotFoundError
from graphiti_core.models.nodes.node_db_queries import (
get_entity_node_return_query,
get_entity_node_save_query,
)
from graphiti_core.nodes import EntityNode
logger = logging.getLogger(__name__)
class KuzuEntityNodeOperations(EntityNodeOperations):
    """EntityNode persistence operations for the Kuzu graph provider.

    Kuzu-specific behavior visible in this class:
    - Bulk operations iterate node-by-node (Kuzu has no UNWIND support).
    - RELATES_TO edges are modeled through RelatesToNode_ intermediate nodes,
      which are deleted explicitly before deleting Entity nodes themselves.
    - Node attributes are serialized to a JSON string before saving.
    """
    async def save(
        self,
        executor: QueryExecutor,
        node: EntityNode,
        tx: Transaction | None = None,
    ) -> None:
        """Upsert a single entity node; runs on tx when one is given, else on executor."""
        # Kuzu uses individual SET per property, attributes serialized as JSON
        attrs_json = json.dumps(node.attributes or {})
        params: dict[str, Any] = {
            'uuid': node.uuid,
            'name': node.name,
            'name_embedding': node.name_embedding,
            'group_id': node.group_id,
            'summary': node.summary,
            'created_at': node.created_at,
            # 'Entity' label is always present; set() de-duplicates.
            'labels': list(set(node.labels + ['Entity'])),
            'attributes': attrs_json,
        }
        query = get_entity_node_save_query(GraphProvider.KUZU, '')
        if tx is not None:
            await tx.run(query, **params)
        else:
            await executor.execute_query(query, **params)
        logger.debug(f'Saved Node to Graph: {node.uuid}')
    async def save_bulk(
        self,
        executor: QueryExecutor,
        nodes: list[EntityNode],
        tx: Transaction | None = None,
        batch_size: int = 100,
    ) -> None:
        """Save many nodes one at a time; batch_size is accepted but unused here."""
        # Kuzu doesn't support UNWIND - iterate and save individually
        for node in nodes:
            await self.save(executor, node, tx=tx)
    async def delete(
        self,
        executor: QueryExecutor,
        node: EntityNode,
        tx: Transaction | None = None,
    ) -> None:
        """Delete one entity node plus its RelatesToNode_ intermediates."""
        # Also delete connected RelatesToNode_ intermediates
        cleanup_query = """
        MATCH (n:Entity {uuid: $uuid})-[:RELATES_TO]->(r:RelatesToNode_)
        DETACH DELETE r
        """
        delete_query = """
        MATCH (n:Entity {uuid: $uuid})
        DETACH DELETE n
        """
        if tx is not None:
            await tx.run(cleanup_query, uuid=node.uuid)
            await tx.run(delete_query, uuid=node.uuid)
        else:
            await executor.execute_query(cleanup_query, uuid=node.uuid)
            await executor.execute_query(delete_query, uuid=node.uuid)
        logger.debug(f'Deleted Node: {node.uuid}')
    async def delete_by_group_id(
        self,
        executor: QueryExecutor,
        group_id: str,
        tx: Transaction | None = None,
        batch_size: int = 100,
    ) -> None:
        """Delete all entity nodes in a group; batch_size is accepted but unused."""
        # Clean up RelatesToNode_ intermediates first
        cleanup_query = """
        MATCH (n:Entity {group_id: $group_id})-[:RELATES_TO]->(r:RelatesToNode_)
        DETACH DELETE r
        """
        query = """
        MATCH (n:Entity {group_id: $group_id})
        DETACH DELETE n
        """
        if tx is not None:
            await tx.run(cleanup_query, group_id=group_id)
            await tx.run(query, group_id=group_id)
        else:
            await executor.execute_query(cleanup_query, group_id=group_id)
            await executor.execute_query(query, group_id=group_id)
    async def delete_by_uuids(
        self,
        executor: QueryExecutor,
        uuids: list[str],
        tx: Transaction | None = None,
        batch_size: int = 100,
    ) -> None:
        """Delete the given entity nodes and their RelatesToNode_ intermediates."""
        cleanup_query = """
        MATCH (n:Entity)-[:RELATES_TO]->(r:RelatesToNode_)
        WHERE n.uuid IN $uuids
        DETACH DELETE r
        """
        query = """
        MATCH (n:Entity)
        WHERE n.uuid IN $uuids
        DETACH DELETE n
        """
        if tx is not None:
            await tx.run(cleanup_query, uuids=uuids)
            await tx.run(query, uuids=uuids)
        else:
            await executor.execute_query(cleanup_query, uuids=uuids)
            await executor.execute_query(query, uuids=uuids)
    async def get_by_uuid(
        self,
        executor: QueryExecutor,
        uuid: str,
    ) -> EntityNode:
        """Fetch one entity node by uuid; raises NodeNotFoundError when absent."""
        query = """
        MATCH (n:Entity {uuid: $uuid})
        RETURN
        """ + get_entity_node_return_query(GraphProvider.KUZU)
        records, _, _ = await executor.execute_query(query, uuid=uuid)
        nodes = [parse_kuzu_entity_node(r) for r in records]
        if len(nodes) == 0:
            raise NodeNotFoundError(uuid)
        return nodes[0]
    async def get_by_uuids(
        self,
        executor: QueryExecutor,
        uuids: list[str],
    ) -> list[EntityNode]:
        """Fetch entity nodes for the given uuids; missing uuids are simply absent."""
        query = """
        MATCH (n:Entity)
        WHERE n.uuid IN $uuids
        RETURN
        """ + get_entity_node_return_query(GraphProvider.KUZU)
        records, _, _ = await executor.execute_query(query, uuids=uuids)
        return [parse_kuzu_entity_node(r) for r in records]
    async def get_by_group_ids(
        self,
        executor: QueryExecutor,
        group_ids: list[str],
        limit: int | None = None,
        uuid_cursor: str | None = None,
    ) -> list[EntityNode]:
        """List entity nodes in the given groups with keyset pagination.

        Results are ordered by uuid DESC; pass the last-seen uuid as uuid_cursor
        to get the next page (`n.uuid < $uuid`).
        """
        cursor_clause = 'AND n.uuid < $uuid' if uuid_cursor else ''
        limit_clause = 'LIMIT $limit' if limit is not None else ''
        query = (
            """
            MATCH (n:Entity)
            WHERE n.group_id IN $group_ids
            """
            + cursor_clause
            + """
            RETURN
            """
            + get_entity_node_return_query(GraphProvider.KUZU)
            + """
            ORDER BY n.uuid DESC
            """
            + limit_clause
        )
        records, _, _ = await executor.execute_query(
            query,
            group_ids=group_ids,
            uuid=uuid_cursor,
            limit=limit,
        )
        return [parse_kuzu_entity_node(r) for r in records]
    async def load_embeddings(
        self,
        executor: QueryExecutor,
        node: EntityNode,
    ) -> None:
        """Attach the stored name embedding to node; raises NodeNotFoundError if absent."""
        query = """
        MATCH (n:Entity {uuid: $uuid})
        RETURN n.name_embedding AS name_embedding
        """
        records, _, _ = await executor.execute_query(query, uuid=node.uuid)
        if len(records) == 0:
            raise NodeNotFoundError(node.uuid)
        node.name_embedding = records[0]['name_embedding']
    async def load_embeddings_bulk(
        self,
        executor: QueryExecutor,
        nodes: list[EntityNode],
        batch_size: int = 100,
    ) -> None:
        """Attach name embeddings to many nodes in one query; unmatched nodes are skipped."""
        uuids = [n.uuid for n in nodes]
        query = """
        MATCH (n:Entity)
        WHERE n.uuid IN $uuids
        RETURN DISTINCT n.uuid AS uuid, n.name_embedding AS name_embedding
        """
        records, _, _ = await executor.execute_query(query, uuids=uuids)
        embedding_map = {r['uuid']: r['name_embedding'] for r in records}
        for node in nodes:
            if node.uuid in embedding_map:
                node.name_embedding = embedding_map[node.uuid]
| {
"repo_id": "getzep/graphiti",
"file_path": "graphiti_core/driver/kuzu/operations/entity_node_ops.py",
"license": "Apache License 2.0",
"lines": 215,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
getzep/graphiti:graphiti_core/driver/kuzu/operations/episode_node_ops.py | """
Copyright 2024, Zep Software, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import logging
from datetime import datetime
from typing import Any
from graphiti_core.driver.driver import GraphProvider
from graphiti_core.driver.operations.episode_node_ops import EpisodeNodeOperations
from graphiti_core.driver.query_executor import QueryExecutor, Transaction
from graphiti_core.driver.record_parsers import episodic_node_from_record
from graphiti_core.errors import NodeNotFoundError
from graphiti_core.models.nodes.node_db_queries import (
EPISODIC_NODE_RETURN,
get_episode_node_save_query,
)
from graphiti_core.nodes import EpisodicNode
logger = logging.getLogger(__name__)
class KuzuEpisodeNodeOperations(EpisodeNodeOperations):
    """EpisodicNode persistence operations for the Kuzu graph provider.

    Bulk operations iterate one node at a time because Kuzu does not support
    UNWIND, and deletes are issued as single statements because Kuzu does not
    support `IN TRANSACTIONS OF` batching.
    """
    async def save(
        self,
        executor: QueryExecutor,
        node: EpisodicNode,
        tx: Transaction | None = None,
    ) -> None:
        """Upsert one episodic node; runs on tx when one is given, else on executor."""
        query = get_episode_node_save_query(GraphProvider.KUZU)
        params: dict[str, Any] = {
            'uuid': node.uuid,
            'name': node.name,
            'group_id': node.group_id,
            'source_description': node.source_description,
            'content': node.content,
            'entity_edges': node.entity_edges,
            'created_at': node.created_at,
            'valid_at': node.valid_at,
            # Stored as the enum's underlying value, not the enum itself.
            'source': node.source.value,
        }
        if tx is not None:
            await tx.run(query, **params)
        else:
            await executor.execute_query(query, **params)
        logger.debug(f'Saved Episode to Graph: {node.uuid}')
    async def save_bulk(
        self,
        executor: QueryExecutor,
        nodes: list[EpisodicNode],
        tx: Transaction | None = None,
        batch_size: int = 100,
    ) -> None:
        """Save many episodes one at a time; batch_size is accepted but unused here."""
        # Kuzu doesn't support UNWIND - iterate and save individually
        for node in nodes:
            await self.save(executor, node, tx=tx)
    async def delete(
        self,
        executor: QueryExecutor,
        node: EpisodicNode,
        tx: Transaction | None = None,
    ) -> None:
        """Delete one episodic node and all edges attached to it."""
        query = """
        MATCH (n:Episodic {uuid: $uuid})
        DETACH DELETE n
        """
        if tx is not None:
            await tx.run(query, uuid=node.uuid)
        else:
            await executor.execute_query(query, uuid=node.uuid)
        logger.debug(f'Deleted Node: {node.uuid}')
    async def delete_by_group_id(
        self,
        executor: QueryExecutor,
        group_id: str,
        tx: Transaction | None = None,
        batch_size: int = 100,
    ) -> None:
        """Delete every episodic node in a group in a single statement."""
        # Kuzu doesn't support IN TRANSACTIONS OF - simple delete
        query = """
        MATCH (n:Episodic {group_id: $group_id})
        DETACH DELETE n
        """
        if tx is not None:
            await tx.run(query, group_id=group_id)
        else:
            await executor.execute_query(query, group_id=group_id)
    async def delete_by_uuids(
        self,
        executor: QueryExecutor,
        uuids: list[str],
        tx: Transaction | None = None,
        batch_size: int = 100,
    ) -> None:
        """Delete the given episodic nodes in a single statement."""
        # Kuzu doesn't support IN TRANSACTIONS OF - simple delete
        query = """
        MATCH (n:Episodic)
        WHERE n.uuid IN $uuids
        DETACH DELETE n
        """
        if tx is not None:
            await tx.run(query, uuids=uuids)
        else:
            await executor.execute_query(query, uuids=uuids)
    async def get_by_uuid(
        self,
        executor: QueryExecutor,
        uuid: str,
    ) -> EpisodicNode:
        """Fetch one episode by uuid; raises NodeNotFoundError when absent."""
        query = (
            """
            MATCH (e:Episodic {uuid: $uuid})
            RETURN
            """
            + EPISODIC_NODE_RETURN
        )
        records, _, _ = await executor.execute_query(query, uuid=uuid)
        episodes = [episodic_node_from_record(r) for r in records]
        if len(episodes) == 0:
            raise NodeNotFoundError(uuid)
        return episodes[0]
    async def get_by_uuids(
        self,
        executor: QueryExecutor,
        uuids: list[str],
    ) -> list[EpisodicNode]:
        """Fetch episodes for the given uuids; missing uuids are simply absent."""
        query = (
            """
            MATCH (e:Episodic)
            WHERE e.uuid IN $uuids
            RETURN DISTINCT
            """
            + EPISODIC_NODE_RETURN
        )
        records, _, _ = await executor.execute_query(query, uuids=uuids)
        return [episodic_node_from_record(r) for r in records]
    async def get_by_group_ids(
        self,
        executor: QueryExecutor,
        group_ids: list[str],
        limit: int | None = None,
        uuid_cursor: str | None = None,
    ) -> list[EpisodicNode]:
        """List episodes in the given groups with keyset pagination (uuid DESC)."""
        cursor_clause = 'AND e.uuid < $uuid' if uuid_cursor else ''
        limit_clause = 'LIMIT $limit' if limit is not None else ''
        query = (
            """
            MATCH (e:Episodic)
            WHERE e.group_id IN $group_ids
            """
            + cursor_clause
            + """
            RETURN DISTINCT
            """
            + EPISODIC_NODE_RETURN
            + """
            ORDER BY uuid DESC
            """
            # NOTE(review): orders by the bare `uuid` alias from the RETURN
            # clause, while sibling classes use `e.uuid` — confirm intentional.
            + limit_clause
        )
        records, _, _ = await executor.execute_query(
            query,
            group_ids=group_ids,
            uuid=uuid_cursor,
            limit=limit,
        )
        return [episodic_node_from_record(r) for r in records]
    async def get_by_entity_node_uuid(
        self,
        executor: QueryExecutor,
        entity_node_uuid: str,
    ) -> list[EpisodicNode]:
        """Fetch all episodes that MENTION the given entity node."""
        query = (
            """
            MATCH (e:Episodic)-[r:MENTIONS]->(n:Entity {uuid: $entity_node_uuid})
            RETURN DISTINCT
            """
            + EPISODIC_NODE_RETURN
        )
        records, _, _ = await executor.execute_query(query, entity_node_uuid=entity_node_uuid)
        return [episodic_node_from_record(r) for r in records]
    async def retrieve_episodes(
        self,
        executor: QueryExecutor,
        reference_time: datetime,
        last_n: int = 3,
        group_ids: list[str] | None = None,
        source: str | None = None,
        saga: str | None = None,
    ) -> list[EpisodicNode]:
        """Return up to last_n episodes valid at or before reference_time.

        When saga and a non-empty group_ids are both given, episodes are scoped
        to that saga (only group_ids[0] is used in this branch); otherwise a
        general query filters by group_ids/source when provided. Results come
        back in valid_at-descending (most recent first) order.
        """
        if saga is not None and group_ids is not None and len(group_ids) > 0:
            source_clause = 'AND e.source = $source' if source else ''
            query = (
                """
                MATCH (s:Saga {name: $saga_name, group_id: $group_id})-[:HAS_EPISODE]->(e:Episodic)
                WHERE e.valid_at <= $reference_time
                """
                + source_clause
                + """
                RETURN
                """
                + EPISODIC_NODE_RETURN
                + """
                ORDER BY e.valid_at DESC
                LIMIT $num_episodes
                """
            )
            records, _, _ = await executor.execute_query(
                query,
                saga_name=saga,
                # Only the first group id is consulted for the saga lookup.
                group_id=group_ids[0],
                reference_time=reference_time,
                source=source,
                num_episodes=last_n,
            )
        else:
            source_clause = 'AND e.source = $source' if source else ''
            group_clause = 'AND e.group_id IN $group_ids' if group_ids else ''
            query = (
                """
                MATCH (e:Episodic)
                WHERE e.valid_at <= $reference_time
                """
                + group_clause
                + source_clause
                + """
                RETURN
                """
                + EPISODIC_NODE_RETURN
                + """
                ORDER BY e.valid_at DESC
                LIMIT $num_episodes
                """
            )
            records, _, _ = await executor.execute_query(
                query,
                reference_time=reference_time,
                group_ids=group_ids,
                source=source,
                num_episodes=last_n,
            )
        return [episodic_node_from_record(r) for r in records]
| {
"repo_id": "getzep/graphiti",
"file_path": "graphiti_core/driver/kuzu/operations/episode_node_ops.py",
"license": "Apache License 2.0",
"lines": 246,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
getzep/graphiti:graphiti_core/driver/kuzu/operations/episodic_edge_ops.py | """
Copyright 2024, Zep Software, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import logging
from typing import Any
from graphiti_core.driver.driver import GraphProvider
from graphiti_core.driver.operations.episodic_edge_ops import EpisodicEdgeOperations
from graphiti_core.driver.query_executor import QueryExecutor, Transaction
from graphiti_core.edges import EpisodicEdge
from graphiti_core.errors import EdgeNotFoundError
from graphiti_core.helpers import parse_db_date
from graphiti_core.models.edges.edge_db_queries import (
EPISODIC_EDGE_RETURN,
EPISODIC_EDGE_SAVE,
get_episodic_edge_save_bulk_query,
)
logger = logging.getLogger(__name__)
def _episodic_edge_from_record(record: Any) -> EpisodicEdge:
    """Build an EpisodicEdge from a Kuzu result row."""
    created = parse_db_date(record['created_at'])
    return EpisodicEdge(
        uuid=record['uuid'],
        group_id=record['group_id'],
        source_node_uuid=record['source_node_uuid'],
        target_node_uuid=record['target_node_uuid'],
        created_at=created,  # type: ignore[arg-type]
    )
class KuzuEpisodicEdgeOperations(EpisodicEdgeOperations):
    """MENTIONS (Episodic -> Entity) edge operations for the Kuzu provider.

    Bulk saves iterate edge-by-edge because Kuzu does not support UNWIND.
    """
    async def save(
        self,
        executor: QueryExecutor,
        edge: EpisodicEdge,
        tx: Transaction | None = None,
    ) -> None:
        """Upsert one MENTIONS edge; runs on tx when one is given, else on executor."""
        params: dict[str, Any] = {
            # source is the Episodic node, target the Entity node.
            'episode_uuid': edge.source_node_uuid,
            'entity_uuid': edge.target_node_uuid,
            'uuid': edge.uuid,
            'group_id': edge.group_id,
            'created_at': edge.created_at,
        }
        if tx is not None:
            await tx.run(EPISODIC_EDGE_SAVE, **params)
        else:
            await executor.execute_query(EPISODIC_EDGE_SAVE, **params)
        logger.debug(f'Saved Edge to Graph: {edge.uuid}')
    async def save_bulk(
        self,
        executor: QueryExecutor,
        edges: list[EpisodicEdge],
        tx: Transaction | None = None,
        batch_size: int = 100,
    ) -> None:
        """Save many MENTIONS edges one at a time; batch_size is accepted but unused."""
        # Kuzu doesn't support UNWIND - iterate and save individually
        query = get_episodic_edge_save_bulk_query(GraphProvider.KUZU)
        for edge in edges:
            params: dict[str, Any] = {
                'source_node_uuid': edge.source_node_uuid,
                'target_node_uuid': edge.target_node_uuid,
                'uuid': edge.uuid,
                'group_id': edge.group_id,
                'created_at': edge.created_at,
            }
            if tx is not None:
                await tx.run(query, **params)
            else:
                await executor.execute_query(query, **params)
    async def delete(
        self,
        executor: QueryExecutor,
        edge: EpisodicEdge,
        tx: Transaction | None = None,
    ) -> None:
        """Delete one MENTIONS edge by its uuid (nodes are left in place)."""
        query = """
        MATCH (n:Episodic)-[e:MENTIONS {uuid: $uuid}]->(m:Entity)
        DELETE e
        """
        if tx is not None:
            await tx.run(query, uuid=edge.uuid)
        else:
            await executor.execute_query(query, uuid=edge.uuid)
        logger.debug(f'Deleted Edge: {edge.uuid}')
    async def delete_by_uuids(
        self,
        executor: QueryExecutor,
        uuids: list[str],
        tx: Transaction | None = None,
    ) -> None:
        """Delete the given MENTIONS edges by uuid in one statement."""
        query = """
        MATCH (n:Episodic)-[e:MENTIONS]->(m:Entity)
        WHERE e.uuid IN $uuids
        DELETE e
        """
        if tx is not None:
            await tx.run(query, uuids=uuids)
        else:
            await executor.execute_query(query, uuids=uuids)
    async def get_by_uuid(
        self,
        executor: QueryExecutor,
        uuid: str,
    ) -> EpisodicEdge:
        """Fetch one MENTIONS edge by uuid; raises EdgeNotFoundError when absent."""
        query = (
            """
            MATCH (n:Episodic)-[e:MENTIONS {uuid: $uuid}]->(m:Entity)
            RETURN
            """
            + EPISODIC_EDGE_RETURN
        )
        records, _, _ = await executor.execute_query(query, uuid=uuid)
        edges = [_episodic_edge_from_record(r) for r in records]
        if len(edges) == 0:
            raise EdgeNotFoundError(uuid)
        return edges[0]
    async def get_by_uuids(
        self,
        executor: QueryExecutor,
        uuids: list[str],
    ) -> list[EpisodicEdge]:
        """Fetch MENTIONS edges for the given uuids; missing uuids are simply absent."""
        query = (
            """
            MATCH (n:Episodic)-[e:MENTIONS]->(m:Entity)
            WHERE e.uuid IN $uuids
            RETURN
            """
            + EPISODIC_EDGE_RETURN
        )
        records, _, _ = await executor.execute_query(query, uuids=uuids)
        return [_episodic_edge_from_record(r) for r in records]
    async def get_by_group_ids(
        self,
        executor: QueryExecutor,
        group_ids: list[str],
        limit: int | None = None,
        uuid_cursor: str | None = None,
    ) -> list[EpisodicEdge]:
        """List MENTIONS edges in the given groups with keyset pagination (uuid DESC)."""
        cursor_clause = 'AND e.uuid < $uuid' if uuid_cursor else ''
        limit_clause = 'LIMIT $limit' if limit is not None else ''
        query = (
            """
            MATCH (n:Episodic)-[e:MENTIONS]->(m:Entity)
            WHERE e.group_id IN $group_ids
            """
            + cursor_clause
            + """
            RETURN
            """
            + EPISODIC_EDGE_RETURN
            + """
            ORDER BY e.uuid DESC
            """
            + limit_clause
        )
        records, _, _ = await executor.execute_query(
            query,
            group_ids=group_ids,
            uuid=uuid_cursor,
            limit=limit,
        )
        return [_episodic_edge_from_record(r) for r in records]
| {
"repo_id": "getzep/graphiti",
"file_path": "graphiti_core/driver/kuzu/operations/episodic_edge_ops.py",
"license": "Apache License 2.0",
"lines": 167,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
getzep/graphiti:graphiti_core/driver/kuzu/operations/graph_ops.py | """
Copyright 2024, Zep Software, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import logging
from typing import Any
from graphiti_core.driver.driver import GraphProvider
from graphiti_core.driver.kuzu.operations.record_parsers import parse_kuzu_entity_node
from graphiti_core.driver.operations.graph_ops import GraphMaintenanceOperations
from graphiti_core.driver.operations.graph_utils import Neighbor, label_propagation
from graphiti_core.driver.query_executor import QueryExecutor
from graphiti_core.driver.record_parsers import community_node_from_record
from graphiti_core.graph_queries import get_fulltext_indices, get_range_indices
from graphiti_core.helpers import semaphore_gather
from graphiti_core.models.nodes.node_db_queries import (
COMMUNITY_NODE_RETURN,
get_entity_node_return_query,
)
from graphiti_core.nodes import CommunityNode, EntityNode, EpisodicNode
logger = logging.getLogger(__name__)
class KuzuGraphMaintenanceOperations(GraphMaintenanceOperations):
    """Graph-wide maintenance operations (clearing, indices, communities) for Kuzu.

    RELATES_TO edges are modeled through RelatesToNode_ intermediate nodes, so
    several operations here traverse or delete those intermediates explicitly.
    """
    async def clear_data(
        self,
        executor: QueryExecutor,
        group_ids: list[str] | None = None,
    ) -> None:
        """Delete everything, or only the nodes belonging to the given groups."""
        if group_ids is None:
            await executor.execute_query('MATCH (n) DETACH DELETE n')
        else:
            # Kuzu requires deleting RelatesToNode_ intermediates in addition to
            # Entity, Episodic, and Community nodes.
            for label in ['RelatesToNode_', 'Entity', 'Episodic', 'Community']:
                await executor.execute_query(
                    f"""
                    MATCH (n:{label})
                    WHERE n.group_id IN $group_ids
                    DETACH DELETE n
                    """,
                    group_ids=group_ids,
                )
    async def build_indices_and_constraints(
        self,
        executor: QueryExecutor,
        delete_existing: bool = False,
    ) -> None:
        """Create all index queries for Kuzu, concurrently via semaphore_gather."""
        if delete_existing:
            await self.delete_all_indexes(executor)
        # Kuzu schema is static (created in setup_schema), so range indices
        # return an empty list. Only FTS indices need to be created here.
        range_indices = get_range_indices(GraphProvider.KUZU)
        fulltext_indices = get_fulltext_indices(GraphProvider.KUZU)
        index_queries = range_indices + fulltext_indices
        await semaphore_gather(*[executor.execute_query(q) for q in index_queries])
    async def delete_all_indexes(
        self,
        executor: QueryExecutor,
    ) -> None:
        """No-op: see comment below."""
        # Kuzu does not have a standard way to drop all indexes programmatically.
        pass
    async def get_community_clusters(
        self,
        executor: QueryExecutor,
        group_ids: list[str] | None = None,
    ) -> list[Any]:
        """Cluster entity nodes per group via label propagation.

        Returns a list of clusters, each a list of EntityNode objects. When
        group_ids is None, all distinct group ids present on Entity nodes are
        processed.
        """
        community_clusters: list[list[EntityNode]] = []
        if group_ids is None:
            group_id_values, _, _ = await executor.execute_query(
                """
                MATCH (n:Entity)
                WHERE n.group_id IS NOT NULL
                RETURN
                collect(DISTINCT n.group_id) AS group_ids
                """
            )
            group_ids = group_id_values[0]['group_ids'] if group_id_values else []
        resolved_group_ids: list[str] = group_ids or []
        for group_id in resolved_group_ids:
            # Adjacency projection: node uuid -> neighbors with edge counts.
            projection: dict[str, list[Neighbor]] = {}
            # Get all entity nodes for this group
            node_records, _, _ = await executor.execute_query(
                """
                MATCH (n:Entity)
                WHERE n.group_id IN $group_ids
                RETURN
                """
                + get_entity_node_return_query(GraphProvider.KUZU),
                group_ids=[group_id],
            )
            nodes = [parse_kuzu_entity_node(r) for r in node_records]
            for node in nodes:
                # Kuzu edges are modeled through RelatesToNode_ intermediate nodes
                records, _, _ = await executor.execute_query(
                    """
                    MATCH (n:Entity {group_id: $group_id, uuid: $uuid})-[:RELATES_TO]->(:RelatesToNode_)-[:RELATES_TO]-(m:Entity {group_id: $group_id})
                    WITH count(*) AS count, m.uuid AS uuid
                    RETURN
                    uuid,
                    count
                    """,
                    uuid=node.uuid,
                    group_id=group_id,
                )
                projection[node.uuid] = [
                    Neighbor(node_uuid=record['uuid'], edge_count=record['count'])
                    for record in records
                ]
            cluster_uuids = label_propagation(projection)
            # Fetch full node objects for each cluster
            for cluster in cluster_uuids:
                if not cluster:
                    continue
                cluster_records, _, _ = await executor.execute_query(
                    """
                    MATCH (n:Entity)
                    WHERE n.uuid IN $uuids
                    RETURN
                    """
                    + get_entity_node_return_query(GraphProvider.KUZU),
                    uuids=cluster,
                )
                community_clusters.append([parse_kuzu_entity_node(r) for r in cluster_records])
        return community_clusters
    async def remove_communities(
        self,
        executor: QueryExecutor,
    ) -> None:
        """Delete every Community node (and its attached edges)."""
        await executor.execute_query(
            """
            MATCH (c:Community)
            DETACH DELETE c
            """
        )
    async def determine_entity_community(
        self,
        executor: QueryExecutor,
        entity: EntityNode,
    ) -> None:
        """Check community membership for an entity node.

        Returns early when the entity already belongs to a community.
        """
        # Check if the node is already part of a community
        records, _, _ = await executor.execute_query(
            """
            MATCH (c:Community)-[:HAS_MEMBER]->(n:Entity {uuid: $entity_uuid})
            RETURN
            """
            + COMMUNITY_NODE_RETURN,
            entity_uuid=entity.uuid,
        )
        if len(records) > 0:
            return
        # If the node has no community, find the mode community of surrounding
        # entities. Kuzu uses RelatesToNode_ as an intermediate for RELATES_TO edges.
        records, _, _ = await executor.execute_query(
            """
            MATCH (c:Community)-[:HAS_MEMBER]->(m:Entity)-[:RELATES_TO]->(:RelatesToNode_)-[:RELATES_TO]-(n:Entity {uuid: $entity_uuid})
            RETURN
            """
            + COMMUNITY_NODE_RETURN,
            entity_uuid=entity.uuid,
        )
        # NOTE(review): the result of this second query is discarded and the
        # method always returns None — confirm whether assignment of the
        # surrounding-mode community was left unimplemented.
    async def get_mentioned_nodes(
        self,
        executor: QueryExecutor,
        episodes: list[EpisodicNode],
    ) -> list[EntityNode]:
        """Return the distinct entity nodes mentioned by any of the given episodes."""
        episode_uuids = [episode.uuid for episode in episodes]
        records, _, _ = await executor.execute_query(
            """
            MATCH (episode:Episodic)-[:MENTIONS]->(n:Entity)
            WHERE episode.uuid IN $uuids
            RETURN DISTINCT
            """
            + get_entity_node_return_query(GraphProvider.KUZU),
            uuids=episode_uuids,
        )
        return [parse_kuzu_entity_node(r) for r in records]
    async def get_communities_by_nodes(
        self,
        executor: QueryExecutor,
        nodes: list[EntityNode],
    ) -> list[CommunityNode]:
        """Return the distinct communities any of the given entity nodes belong to."""
        node_uuids = [node.uuid for node in nodes]
        records, _, _ = await executor.execute_query(
            """
            MATCH (c:Community)-[:HAS_MEMBER]->(m:Entity)
            WHERE m.uuid IN $uuids
            RETURN DISTINCT
            """
            + COMMUNITY_NODE_RETURN,
            uuids=node_uuids,
        )
        return [community_node_from_record(r) for r in records]
| {
"repo_id": "getzep/graphiti",
"file_path": "graphiti_core/driver/kuzu/operations/graph_ops.py",
"license": "Apache License 2.0",
"lines": 198,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
getzep/graphiti:graphiti_core/driver/kuzu/operations/has_episode_edge_ops.py | """
Copyright 2024, Zep Software, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import logging
from typing import Any
from graphiti_core.driver.operations.has_episode_edge_ops import HasEpisodeEdgeOperations
from graphiti_core.driver.query_executor import QueryExecutor, Transaction
from graphiti_core.edges import HasEpisodeEdge
from graphiti_core.errors import EdgeNotFoundError
from graphiti_core.helpers import parse_db_date
from graphiti_core.models.edges.edge_db_queries import (
HAS_EPISODE_EDGE_RETURN,
HAS_EPISODE_EDGE_SAVE,
)
logger = logging.getLogger(__name__)
def _has_episode_edge_from_record(record: Any) -> HasEpisodeEdge:
    """Build a HasEpisodeEdge from a Kuzu result row."""
    created = parse_db_date(record['created_at'])
    return HasEpisodeEdge(
        uuid=record['uuid'],
        group_id=record['group_id'],
        source_node_uuid=record['source_node_uuid'],
        target_node_uuid=record['target_node_uuid'],
        created_at=created,  # type: ignore[arg-type]
    )
class KuzuHasEpisodeEdgeOperations(HasEpisodeEdgeOperations):
    """HAS_EPISODE (Saga -> Episodic) edge operations for the Kuzu provider.

    Bulk saves iterate edge-by-edge; every write honors an optional Transaction.
    """
    async def save(
        self,
        executor: QueryExecutor,
        edge: HasEpisodeEdge,
        tx: Transaction | None = None,
    ) -> None:
        """Upsert one HAS_EPISODE edge; runs on tx when one is given, else on executor."""
        params: dict[str, Any] = {
            # source is the Saga node, target the Episodic node.
            'saga_uuid': edge.source_node_uuid,
            'episode_uuid': edge.target_node_uuid,
            'uuid': edge.uuid,
            'group_id': edge.group_id,
            'created_at': edge.created_at,
        }
        if tx is not None:
            await tx.run(HAS_EPISODE_EDGE_SAVE, **params)
        else:
            await executor.execute_query(HAS_EPISODE_EDGE_SAVE, **params)
        logger.debug(f'Saved Edge to Graph: {edge.uuid}')
    async def save_bulk(
        self,
        executor: QueryExecutor,
        edges: list[HasEpisodeEdge],
        tx: Transaction | None = None,
        batch_size: int = 100,
    ) -> None:
        """Save many edges one at a time; batch_size is accepted but unused here."""
        for edge in edges:
            await self.save(executor, edge, tx=tx)
    async def delete(
        self,
        executor: QueryExecutor,
        edge: HasEpisodeEdge,
        tx: Transaction | None = None,
    ) -> None:
        """Delete one HAS_EPISODE edge by its uuid (nodes are left in place)."""
        query = """
        MATCH (n:Saga)-[e:HAS_EPISODE {uuid: $uuid}]->(m:Episodic)
        DELETE e
        """
        if tx is not None:
            await tx.run(query, uuid=edge.uuid)
        else:
            await executor.execute_query(query, uuid=edge.uuid)
        logger.debug(f'Deleted Edge: {edge.uuid}')
    async def delete_by_uuids(
        self,
        executor: QueryExecutor,
        uuids: list[str],
        tx: Transaction | None = None,
    ) -> None:
        """Delete the given HAS_EPISODE edges by uuid in one statement."""
        query = """
        MATCH (n:Saga)-[e:HAS_EPISODE]->(m:Episodic)
        WHERE e.uuid IN $uuids
        DELETE e
        """
        if tx is not None:
            await tx.run(query, uuids=uuids)
        else:
            await executor.execute_query(query, uuids=uuids)
    async def get_by_uuid(
        self,
        executor: QueryExecutor,
        uuid: str,
    ) -> HasEpisodeEdge:
        """Fetch one HAS_EPISODE edge by uuid; raises EdgeNotFoundError when absent."""
        query = (
            """
            MATCH (n:Saga)-[e:HAS_EPISODE {uuid: $uuid}]->(m:Episodic)
            RETURN
            """
            + HAS_EPISODE_EDGE_RETURN
        )
        records, _, _ = await executor.execute_query(query, uuid=uuid)
        edges = [_has_episode_edge_from_record(r) for r in records]
        if len(edges) == 0:
            raise EdgeNotFoundError(uuid)
        return edges[0]
    async def get_by_uuids(
        self,
        executor: QueryExecutor,
        uuids: list[str],
    ) -> list[HasEpisodeEdge]:
        """Fetch HAS_EPISODE edges for the given uuids; missing uuids are simply absent."""
        query = (
            """
            MATCH (n:Saga)-[e:HAS_EPISODE]->(m:Episodic)
            WHERE e.uuid IN $uuids
            RETURN
            """
            + HAS_EPISODE_EDGE_RETURN
        )
        records, _, _ = await executor.execute_query(query, uuids=uuids)
        return [_has_episode_edge_from_record(r) for r in records]
    async def get_by_group_ids(
        self,
        executor: QueryExecutor,
        group_ids: list[str],
        limit: int | None = None,
        uuid_cursor: str | None = None,
    ) -> list[HasEpisodeEdge]:
        """List HAS_EPISODE edges in the given groups with keyset pagination (uuid DESC)."""
        cursor_clause = 'AND e.uuid < $uuid' if uuid_cursor else ''
        limit_clause = 'LIMIT $limit' if limit is not None else ''
        query = (
            """
            MATCH (n:Saga)-[e:HAS_EPISODE]->(m:Episodic)
            WHERE e.group_id IN $group_ids
            """
            + cursor_clause
            + """
            RETURN
            """
            + HAS_EPISODE_EDGE_RETURN
            + """
            ORDER BY e.uuid DESC
            """
            + limit_clause
        )
        records, _, _ = await executor.execute_query(
            query,
            group_ids=group_ids,
            uuid=uuid_cursor,
            limit=limit,
        )
        return [_has_episode_edge_from_record(r) for r in records]
| {
"repo_id": "getzep/graphiti",
"file_path": "graphiti_core/driver/kuzu/operations/has_episode_edge_ops.py",
"license": "Apache License 2.0",
"lines": 153,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
getzep/graphiti:graphiti_core/driver/kuzu/operations/next_episode_edge_ops.py | """
Copyright 2024, Zep Software, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import logging
from typing import Any
from graphiti_core.driver.operations.next_episode_edge_ops import NextEpisodeEdgeOperations
from graphiti_core.driver.query_executor import QueryExecutor, Transaction
from graphiti_core.edges import NextEpisodeEdge
from graphiti_core.errors import EdgeNotFoundError
from graphiti_core.helpers import parse_db_date
from graphiti_core.models.edges.edge_db_queries import (
NEXT_EPISODE_EDGE_RETURN,
NEXT_EPISODE_EDGE_SAVE,
)
logger = logging.getLogger(__name__)
def _next_episode_edge_from_record(record: Any) -> NextEpisodeEdge:
    """Build a NextEpisodeEdge from a Kuzu result row."""
    created = parse_db_date(record['created_at'])
    return NextEpisodeEdge(
        uuid=record['uuid'],
        group_id=record['group_id'],
        source_node_uuid=record['source_node_uuid'],
        target_node_uuid=record['target_node_uuid'],
        created_at=created,  # type: ignore[arg-type]
    )
class KuzuNextEpisodeEdgeOperations(NextEpisodeEdgeOperations):
    """NEXT_EPISODE (Episodic -> Episodic) edge operations for the Kuzu provider.

    Bulk saves iterate edge-by-edge; every write honors an optional Transaction.
    """
    async def save(
        self,
        executor: QueryExecutor,
        edge: NextEpisodeEdge,
        tx: Transaction | None = None,
    ) -> None:
        """Upsert one NEXT_EPISODE edge; runs on tx when one is given, else on executor."""
        params: dict[str, Any] = {
            'source_episode_uuid': edge.source_node_uuid,
            'target_episode_uuid': edge.target_node_uuid,
            'uuid': edge.uuid,
            'group_id': edge.group_id,
            'created_at': edge.created_at,
        }
        if tx is not None:
            await tx.run(NEXT_EPISODE_EDGE_SAVE, **params)
        else:
            await executor.execute_query(NEXT_EPISODE_EDGE_SAVE, **params)
        logger.debug(f'Saved Edge to Graph: {edge.uuid}')
    async def save_bulk(
        self,
        executor: QueryExecutor,
        edges: list[NextEpisodeEdge],
        tx: Transaction | None = None,
        batch_size: int = 100,
    ) -> None:
        """Save many edges one at a time; batch_size is accepted but unused here."""
        for edge in edges:
            await self.save(executor, edge, tx=tx)
    async def delete(
        self,
        executor: QueryExecutor,
        edge: NextEpisodeEdge,
        tx: Transaction | None = None,
    ) -> None:
        """Delete one NEXT_EPISODE edge by its uuid (nodes are left in place)."""
        query = """
        MATCH (n:Episodic)-[e:NEXT_EPISODE {uuid: $uuid}]->(m:Episodic)
        DELETE e
        """
        if tx is not None:
            await tx.run(query, uuid=edge.uuid)
        else:
            await executor.execute_query(query, uuid=edge.uuid)
        logger.debug(f'Deleted Edge: {edge.uuid}')
    async def delete_by_uuids(
        self,
        executor: QueryExecutor,
        uuids: list[str],
        tx: Transaction | None = None,
    ) -> None:
        """Delete the given NEXT_EPISODE edges by uuid in one statement."""
        query = """
        MATCH (n:Episodic)-[e:NEXT_EPISODE]->(m:Episodic)
        WHERE e.uuid IN $uuids
        DELETE e
        """
        if tx is not None:
            await tx.run(query, uuids=uuids)
        else:
            await executor.execute_query(query, uuids=uuids)
    async def get_by_uuid(
        self,
        executor: QueryExecutor,
        uuid: str,
    ) -> NextEpisodeEdge:
        """Fetch one NEXT_EPISODE edge by uuid; raises EdgeNotFoundError when absent."""
        query = (
            """
            MATCH (n:Episodic)-[e:NEXT_EPISODE {uuid: $uuid}]->(m:Episodic)
            RETURN
            """
            + NEXT_EPISODE_EDGE_RETURN
        )
        records, _, _ = await executor.execute_query(query, uuid=uuid)
        edges = [_next_episode_edge_from_record(r) for r in records]
        if len(edges) == 0:
            raise EdgeNotFoundError(uuid)
        return edges[0]
    async def get_by_uuids(
        self,
        executor: QueryExecutor,
        uuids: list[str],
    ) -> list[NextEpisodeEdge]:
        """Fetch NEXT_EPISODE edges for the given uuids; missing uuids are simply absent."""
        query = (
            """
            MATCH (n:Episodic)-[e:NEXT_EPISODE]->(m:Episodic)
            WHERE e.uuid IN $uuids
            RETURN
            """
            + NEXT_EPISODE_EDGE_RETURN
        )
        records, _, _ = await executor.execute_query(query, uuids=uuids)
        return [_next_episode_edge_from_record(r) for r in records]
    async def get_by_group_ids(
        self,
        executor: QueryExecutor,
        group_ids: list[str],
        limit: int | None = None,
        uuid_cursor: str | None = None,
    ) -> list[NextEpisodeEdge]:
        """List NEXT_EPISODE edges in the given groups with keyset pagination (uuid DESC)."""
        cursor_clause = 'AND e.uuid < $uuid' if uuid_cursor else ''
        limit_clause = 'LIMIT $limit' if limit is not None else ''
        query = (
            """
            MATCH (n:Episodic)-[e:NEXT_EPISODE]->(m:Episodic)
            WHERE e.group_id IN $group_ids
            """
            + cursor_clause
            + """
            RETURN
            """
            + NEXT_EPISODE_EDGE_RETURN
            + """
            ORDER BY e.uuid DESC
            """
            + limit_clause
        )
        records, _, _ = await executor.execute_query(
            query,
            group_ids=group_ids,
            uuid=uuid_cursor,
            limit=limit,
        )
        return [_next_episode_edge_from_record(r) for r in records]
| {
"repo_id": "getzep/graphiti",
"file_path": "graphiti_core/driver/kuzu/operations/next_episode_edge_ops.py",
"license": "Apache License 2.0",
"lines": 153,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
getzep/graphiti:graphiti_core/driver/kuzu/operations/record_parsers.py | """
Copyright 2024, Zep Software, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import json
from typing import Any
from graphiti_core.driver.record_parsers import entity_edge_from_record, entity_node_from_record
from graphiti_core.edges import EntityEdge
from graphiti_core.nodes import EntityNode
def parse_kuzu_entity_node(record: Any) -> EntityNode:
    """Parse a Kuzu entity node record, deserializing JSON attributes.

    Kuzu stores the attributes map as a JSON string; normalize it to a dict
    (empty on missing/None or on a malformed payload) before delegating to
    the shared record parser.
    """
    attrs = record.get('attributes')
    if isinstance(attrs, str):
        try:
            record['attributes'] = json.loads(attrs)
        except (json.JSONDecodeError, TypeError):
            record['attributes'] = {}
    elif attrs is None:
        record['attributes'] = {}
    return entity_node_from_record(record)
def parse_kuzu_entity_edge(record: Any) -> EntityEdge:
    """Parse a Kuzu entity edge record, deserializing JSON attributes.

    Mirrors parse_kuzu_entity_node: the attributes field arrives as a JSON
    string and is normalized to a dict (empty on None or a decode failure).
    """
    attrs = record.get('attributes')
    if isinstance(attrs, str):
        try:
            record['attributes'] = json.loads(attrs)
        except (json.JSONDecodeError, TypeError):
            record['attributes'] = {}
    elif attrs is None:
        record['attributes'] = {}
    return entity_edge_from_record(record)
| {
"repo_id": "getzep/graphiti",
"file_path": "graphiti_core/driver/kuzu/operations/record_parsers.py",
"license": "Apache License 2.0",
"lines": 37,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
getzep/graphiti:graphiti_core/driver/kuzu/operations/saga_node_ops.py | """
Copyright 2024, Zep Software, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import logging
from typing import Any
from graphiti_core.driver.driver import GraphProvider
from graphiti_core.driver.operations.saga_node_ops import SagaNodeOperations
from graphiti_core.driver.query_executor import QueryExecutor, Transaction
from graphiti_core.errors import NodeNotFoundError
from graphiti_core.helpers import parse_db_date
from graphiti_core.models.nodes.node_db_queries import SAGA_NODE_RETURN, get_saga_node_save_query
from graphiti_core.nodes import SagaNode
logger = logging.getLogger(__name__)
def _saga_node_from_record(record: Any) -> SagaNode:
    """Hydrate a SagaNode from a raw database record."""
    created_at = parse_db_date(record['created_at'])  # type: ignore[arg-type]
    return SagaNode(
        uuid=record['uuid'],
        name=record['name'],
        group_id=record['group_id'],
        created_at=created_at,
    )
class KuzuSagaNodeOperations(SagaNodeOperations):
    """Saga-node CRUD operations implemented for the Kuzu graph provider.

    All write methods accept an optional open *tx*; when it is None the
    query is executed directly through the given executor instead.
    """

    async def save(
        self,
        executor: QueryExecutor,
        node: SagaNode,
        tx: Transaction | None = None,
    ) -> None:
        """Insert or update a single Saga node using the provider-specific save query."""
        query = get_saga_node_save_query(GraphProvider.KUZU)
        params: dict[str, Any] = {
            'uuid': node.uuid,
            'name': node.name,
            'group_id': node.group_id,
            'created_at': node.created_at,
        }
        if tx is not None:
            await tx.run(query, **params)
        else:
            await executor.execute_query(query, **params)
        logger.debug(f'Saved Saga Node to Graph: {node.uuid}')

    async def save_bulk(
        self,
        executor: QueryExecutor,
        nodes: list[SagaNode],
        tx: Transaction | None = None,
        batch_size: int = 100,
    ) -> None:
        """Save many Saga nodes; *batch_size* is unused on Kuzu (see below)."""
        # Kuzu doesn't support UNWIND - iterate and save individually
        for node in nodes:
            await self.save(executor, node, tx=tx)

    async def delete(
        self,
        executor: QueryExecutor,
        node: SagaNode,
        tx: Transaction | None = None,
    ) -> None:
        """Detach-delete the Saga node matching node.uuid."""
        query = """
            MATCH (n:Saga {uuid: $uuid})
            DETACH DELETE n
        """
        if tx is not None:
            await tx.run(query, uuid=node.uuid)
        else:
            await executor.execute_query(query, uuid=node.uuid)
        logger.debug(f'Deleted Node: {node.uuid}')

    async def delete_by_group_id(
        self,
        executor: QueryExecutor,
        group_id: str,
        tx: Transaction | None = None,
        batch_size: int = 100,
    ) -> None:
        """Detach-delete all Saga nodes in *group_id* (single statement; no batching)."""
        # Kuzu doesn't support IN TRANSACTIONS OF - simple delete
        query = """
            MATCH (n:Saga {group_id: $group_id})
            DETACH DELETE n
        """
        if tx is not None:
            await tx.run(query, group_id=group_id)
        else:
            await executor.execute_query(query, group_id=group_id)

    async def delete_by_uuids(
        self,
        executor: QueryExecutor,
        uuids: list[str],
        tx: Transaction | None = None,
        batch_size: int = 100,
    ) -> None:
        """Detach-delete every Saga node whose uuid appears in *uuids*."""
        # Kuzu doesn't support IN TRANSACTIONS OF - simple delete
        query = """
            MATCH (n:Saga)
            WHERE n.uuid IN $uuids
            DETACH DELETE n
        """
        if tx is not None:
            await tx.run(query, uuids=uuids)
        else:
            await executor.execute_query(query, uuids=uuids)

    async def get_by_uuid(
        self,
        executor: QueryExecutor,
        uuid: str,
    ) -> SagaNode:
        """Fetch one Saga node by uuid; raises NodeNotFoundError when absent."""
        query = (
            """
            MATCH (s:Saga {uuid: $uuid})
            RETURN
            """
            + SAGA_NODE_RETURN
        )
        records, _, _ = await executor.execute_query(query, uuid=uuid)
        nodes = [_saga_node_from_record(r) for r in records]
        if len(nodes) == 0:
            raise NodeNotFoundError(uuid)
        return nodes[0]

    async def get_by_uuids(
        self,
        executor: QueryExecutor,
        uuids: list[str],
    ) -> list[SagaNode]:
        """Fetch every Saga node whose uuid appears in *uuids*."""
        query = (
            """
            MATCH (s:Saga)
            WHERE s.uuid IN $uuids
            RETURN
            """
            + SAGA_NODE_RETURN
        )
        records, _, _ = await executor.execute_query(query, uuids=uuids)
        return [_saga_node_from_record(r) for r in records]

    async def get_by_group_ids(
        self,
        executor: QueryExecutor,
        group_ids: list[str],
        limit: int | None = None,
        uuid_cursor: str | None = None,
    ) -> list[SagaNode]:
        """Page through Saga nodes for *group_ids*, ordered by uuid descending.

        *uuid_cursor* continues a previous page (uuids strictly below the
        cursor); *limit* caps the page size when given.
        """
        cursor_clause = 'AND s.uuid < $uuid' if uuid_cursor else ''
        limit_clause = 'LIMIT $limit' if limit is not None else ''
        query = (
            """
            MATCH (s:Saga)
            WHERE s.group_id IN $group_ids
            """
            + cursor_clause
            + """
            RETURN
            """
            + SAGA_NODE_RETURN
            + """
            ORDER BY s.uuid DESC
            """
            + limit_clause
        )
        # $uuid/$limit may be None when their clauses are absent; the query
        # then never references them.
        records, _, _ = await executor.execute_query(
            query,
            group_ids=group_ids,
            uuid=uuid_cursor,
            limit=limit,
        )
        return [_saga_node_from_record(r) for r in records]
| {
"repo_id": "getzep/graphiti",
"file_path": "graphiti_core/driver/kuzu/operations/saga_node_ops.py",
"license": "Apache License 2.0",
"lines": 169,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
getzep/graphiti:graphiti_core/driver/kuzu/operations/search_ops.py | """
Copyright 2024, Zep Software, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import logging
from typing import Any
from graphiti_core.driver.driver import GraphProvider
from graphiti_core.driver.kuzu.operations.record_parsers import (
parse_kuzu_entity_edge,
parse_kuzu_entity_node,
)
from graphiti_core.driver.operations.search_ops import SearchOperations
from graphiti_core.driver.query_executor import QueryExecutor
from graphiti_core.driver.record_parsers import (
community_node_from_record,
episodic_node_from_record,
)
from graphiti_core.edges import EntityEdge
from graphiti_core.graph_queries import (
get_nodes_query,
get_relationships_query,
get_vector_cosine_func_query,
)
from graphiti_core.models.edges.edge_db_queries import get_entity_edge_return_query
from graphiti_core.models.nodes.node_db_queries import (
COMMUNITY_NODE_RETURN,
EPISODIC_NODE_RETURN,
get_entity_node_return_query,
)
from graphiti_core.nodes import CommunityNode, EntityNode, EpisodicNode
from graphiti_core.search.search_filters import (
SearchFilters,
edge_search_filter_query_constructor,
node_search_filter_query_constructor,
)
logger = logging.getLogger(__name__)
MAX_QUERY_LENGTH = 128
def _build_kuzu_fulltext_query(
query: str,
group_ids: list[str] | None = None, # noqa: ARG001
max_query_length: int = MAX_QUERY_LENGTH,
) -> str:
"""Build a fulltext query string for Kuzu.
Kuzu does not use Lucene syntax. The raw query is returned, truncated if it
exceeds *max_query_length* words.
"""
words = query.split()
if len(words) >= max_query_length:
words = words[:max_query_length]
truncated = ' '.join(words)
return truncated
class KuzuSearchOperations(SearchOperations):
    """Search operations implemented for the Kuzu graph provider.

    Kuzu-specific quirks handled here:
    - Edges are modeled via an intermediate RelatesToNode_ node, so every
      logical hop between entities is two physical hops.
    - UNWIND is unavailable, so per-item queries are issued in loops.
    """

    # --- Node search ---
    async def node_fulltext_search(
        self,
        executor: QueryExecutor,
        query: str,
        search_filter: SearchFilters,
        group_ids: list[str] | None = None,
        limit: int = 10,
    ) -> list[EntityNode]:
        """Fulltext search over entity names/summaries, best score first."""
        fuzzy_query = _build_kuzu_fulltext_query(query, group_ids)
        if fuzzy_query == '':
            return []
        filter_queries, filter_params = node_search_filter_query_constructor(
            search_filter, GraphProvider.KUZU
        )
        if group_ids is not None:
            filter_queries.append('n.group_id IN $group_ids')
            filter_params['group_ids'] = group_ids
        filter_query = ''
        if filter_queries:
            filter_query = ' WHERE ' + (' AND '.join(filter_queries))
        cypher = (
            get_nodes_query(
                'node_name_and_summary', '$query', limit=limit, provider=GraphProvider.KUZU
            )
            + ' WITH node AS n, score'
            + filter_query
            + """
            WITH n, score
            ORDER BY score DESC
            LIMIT $limit
            RETURN
            """
            + get_entity_node_return_query(GraphProvider.KUZU)
        )
        records, _, _ = await executor.execute_query(
            cypher,
            query=fuzzy_query,
            limit=limit,
            **filter_params,
        )
        return [parse_kuzu_entity_node(r) for r in records]

    async def node_similarity_search(
        self,
        executor: QueryExecutor,
        search_vector: list[float],
        search_filter: SearchFilters,
        group_ids: list[str] | None = None,
        limit: int = 10,
        min_score: float = 0.6,
    ) -> list[EntityNode]:
        """Cosine-similarity search over entity name embeddings (score > min_score)."""
        filter_queries, filter_params = node_search_filter_query_constructor(
            search_filter, GraphProvider.KUZU
        )
        if group_ids is not None:
            filter_queries.append('n.group_id IN $group_ids')
            filter_params['group_ids'] = group_ids
        filter_query = ''
        if filter_queries:
            filter_query = ' WHERE ' + (' AND '.join(filter_queries))
        # Kuzu requires the parameter to be cast to a fixed-size FLOAT array.
        search_vector_var = f'CAST($search_vector AS FLOAT[{len(search_vector)}])'
        cypher = (
            'MATCH (n:Entity)'
            + filter_query
            + """
            WITH n, """
            + get_vector_cosine_func_query(
                'n.name_embedding', search_vector_var, GraphProvider.KUZU
            )
            + """ AS score
            WHERE score > $min_score
            RETURN
            """
            + get_entity_node_return_query(GraphProvider.KUZU)
            + """
            ORDER BY score DESC
            LIMIT $limit
            """
        )
        records, _, _ = await executor.execute_query(
            cypher,
            search_vector=search_vector,
            limit=limit,
            min_score=min_score,
            **filter_params,
        )
        return [parse_kuzu_entity_node(r) for r in records]

    async def node_bfs_search(
        self,
        executor: QueryExecutor,
        origin_uuids: list[str],
        search_filter: SearchFilters,
        max_depth: int,
        group_ids: list[str] | None = None,
        limit: int = 10,
    ) -> list[EntityNode]:
        """Breadth-first entity search from Episodic or Entity origin uuids.

        Results are deduplicated by uuid and capped at *limit*.
        """
        if not origin_uuids or max_depth < 1:
            return []
        filter_queries, filter_params = node_search_filter_query_constructor(
            search_filter, GraphProvider.KUZU
        )
        if group_ids is not None:
            filter_queries.append('n.group_id IN $group_ids')
            filter_params['group_ids'] = group_ids
        filter_query = ''
        if filter_queries:
            filter_query = ' AND ' + (' AND '.join(filter_queries))
        # Kuzu uses RelatesToNode_ as an intermediate node for edges, so each
        # logical hop is actually 2 hops in the graph. We need 3 separate
        # MATCH queries UNIONed together:
        # 1. Episodic -> MENTIONS -> Entity (direct mention)
        # 2. Entity -> RELATES_TO*{2..depth*2} -> Entity (entity traversal)
        # 3. Episodic -> MENTIONS -> Entity -> RELATES_TO*{2..(depth-1)*2} -> Entity (combined)
        all_records: list[Any] = []
        for origin_uuid in origin_uuids:
            # Query 1: From Episodic origins via MENTIONS
            cypher_episodic = (
                """
                MATCH (origin:Episodic {uuid: $origin_uuid})-[:MENTIONS]->(n:Entity)
                WHERE n.group_id = origin.group_id
                """
                + filter_query
                + """
                RETURN
                """
                + get_entity_node_return_query(GraphProvider.KUZU)
                + """
                LIMIT $limit
                """
            )
            records, _, _ = await executor.execute_query(
                cypher_episodic,
                origin_uuid=origin_uuid,
                limit=limit,
                **filter_params,
            )
            all_records.extend(records)
            # Query 2: From Entity origins via RELATES_TO (doubled depth)
            doubled_depth = max_depth * 2
            cypher_entity = (
                f"""
                MATCH (origin:Entity {{uuid: $origin_uuid}})-[:RELATES_TO*2..{doubled_depth}]->(n:Entity)
                WHERE n.group_id = origin.group_id
                """
                + filter_query
                + """
                RETURN
                """
                + get_entity_node_return_query(GraphProvider.KUZU)
                + """
                LIMIT $limit
                """
            )
            records, _, _ = await executor.execute_query(
                cypher_entity,
                origin_uuid=origin_uuid,
                limit=limit,
                **filter_params,
            )
            all_records.extend(records)
            # Query 3: From Episodic through Entity (only if max_depth > 1)
            if max_depth > 1:
                combined_depth = (max_depth - 1) * 2
                cypher_combined = (
                    f"""
                    MATCH (origin:Episodic {{uuid: $origin_uuid}})-[:MENTIONS]->(:Entity)-[:RELATES_TO*2..{combined_depth}]->(n:Entity)
                    WHERE n.group_id = origin.group_id
                    """
                    + filter_query
                    + """
                    RETURN
                    """
                    + get_entity_node_return_query(GraphProvider.KUZU)
                    + """
                    LIMIT $limit
                    """
                )
                records, _, _ = await executor.execute_query(
                    cypher_combined,
                    origin_uuid=origin_uuid,
                    limit=limit,
                    **filter_params,
                )
                all_records.extend(records)
        # Deduplicate by uuid and limit
        seen: set[str] = set()
        unique_nodes: list[EntityNode] = []
        for r in all_records:
            node = parse_kuzu_entity_node(r)
            if node.uuid not in seen:
                seen.add(node.uuid)
                unique_nodes.append(node)
                if len(unique_nodes) >= limit:
                    break
        return unique_nodes

    # --- Edge search ---
    async def edge_fulltext_search(
        self,
        executor: QueryExecutor,
        query: str,
        search_filter: SearchFilters,
        group_ids: list[str] | None = None,
        limit: int = 10,
    ) -> list[EntityEdge]:
        """Fulltext search over edge names/facts, best score first."""
        fuzzy_query = _build_kuzu_fulltext_query(query, group_ids)
        if fuzzy_query == '':
            return []
        filter_queries, filter_params = edge_search_filter_query_constructor(
            search_filter, GraphProvider.KUZU
        )
        if group_ids is not None:
            filter_queries.append('e.group_id IN $group_ids')
            filter_params['group_ids'] = group_ids
        filter_query = ''
        if filter_queries:
            filter_query = ' WHERE ' + (' AND '.join(filter_queries))
        # Kuzu FTS for edges queries the RelatesToNode_ label, then we match
        # the full pattern to get source (n) and target (m) Entity nodes.
        cypher = (
            get_relationships_query('edge_name_and_fact', limit=limit, provider=GraphProvider.KUZU)
            + """
            WITH node AS e, score
            MATCH (n:Entity)-[:RELATES_TO]->(e)-[:RELATES_TO]->(m:Entity)
            """
            + filter_query
            + """
            WITH e, score, n, m
            RETURN
            """
            + get_entity_edge_return_query(GraphProvider.KUZU)
            + """
            ORDER BY score DESC
            LIMIT $limit
            """
        )
        records, _, _ = await executor.execute_query(
            cypher,
            query=fuzzy_query,
            limit=limit,
            **filter_params,
        )
        return [parse_kuzu_entity_edge(r) for r in records]

    async def edge_similarity_search(
        self,
        executor: QueryExecutor,
        search_vector: list[float],
        source_node_uuid: str | None,
        target_node_uuid: str | None,
        search_filter: SearchFilters,
        group_ids: list[str] | None = None,
        limit: int = 10,
        min_score: float = 0.6,
    ) -> list[EntityEdge]:
        """Cosine-similarity search over edge fact embeddings.

        Optionally constrains the edge's source and/or target Entity uuid.
        """
        filter_queries, filter_params = edge_search_filter_query_constructor(
            search_filter, GraphProvider.KUZU
        )
        if group_ids is not None:
            filter_queries.append('e.group_id IN $group_ids')
            filter_params['group_ids'] = group_ids
        if source_node_uuid is not None:
            filter_params['source_uuid'] = source_node_uuid
            filter_queries.append('n.uuid = $source_uuid')
        if target_node_uuid is not None:
            filter_params['target_uuid'] = target_node_uuid
            filter_queries.append('m.uuid = $target_uuid')
        filter_query = ''
        if filter_queries:
            filter_query = ' WHERE ' + (' AND '.join(filter_queries))
        # Kuzu requires the parameter to be cast to a fixed-size FLOAT array.
        search_vector_var = f'CAST($search_vector AS FLOAT[{len(search_vector)}])'
        cypher = (
            'MATCH (n:Entity)-[:RELATES_TO]->(e:RelatesToNode_)-[:RELATES_TO]->(m:Entity)'
            + filter_query
            + """
            WITH DISTINCT e, n, m, """
            + get_vector_cosine_func_query(
                'e.fact_embedding', search_vector_var, GraphProvider.KUZU
            )
            + """ AS score
            WHERE score > $min_score
            RETURN
            """
            + get_entity_edge_return_query(GraphProvider.KUZU)
            + """
            ORDER BY score DESC
            LIMIT $limit
            """
        )
        records, _, _ = await executor.execute_query(
            cypher,
            search_vector=search_vector,
            limit=limit,
            min_score=min_score,
            **filter_params,
        )
        return [parse_kuzu_entity_edge(r) for r in records]

    async def edge_bfs_search(
        self,
        executor: QueryExecutor,
        origin_uuids: list[str],
        max_depth: int,
        search_filter: SearchFilters,
        group_ids: list[str] | None = None,
        limit: int = 10,
    ) -> list[EntityEdge]:
        """Breadth-first edge search from Episodic or Entity origin uuids.

        Results are deduplicated by uuid and capped at *limit*.
        """
        if not origin_uuids:
            return []
        filter_queries, filter_params = edge_search_filter_query_constructor(
            search_filter, GraphProvider.KUZU
        )
        if group_ids is not None:
            filter_queries.append('e.group_id IN $group_ids')
            filter_params['group_ids'] = group_ids
        filter_query = ''
        if filter_queries:
            filter_query = ' WHERE ' + (' AND '.join(filter_queries))
        # Because RelatesToNode_ doubles every hop, we need separate queries
        # similar to node BFS.
        all_records: list[Any] = []
        doubled_depth = max_depth * 2
        for origin_uuid in origin_uuids:
            # From Entity origins: traverse doubled depth to reach RelatesToNode_ edges
            cypher_entity = (
                f"""
                MATCH (origin:Entity {{uuid: $origin_uuid}})-[:RELATES_TO*2..{doubled_depth}]->(e:RelatesToNode_)
                MATCH (n:Entity)-[:RELATES_TO]->(e)-[:RELATES_TO]->(m:Entity)
                """
                + filter_query
                + """
                RETURN DISTINCT
                """
                + get_entity_edge_return_query(GraphProvider.KUZU)
                + """
                LIMIT $limit
                """
            )
            records, _, _ = await executor.execute_query(
                cypher_entity,
                origin_uuid=origin_uuid,
                limit=limit,
                **filter_params,
            )
            all_records.extend(records)
            # From Episodic origins: go through MENTIONS to Entity, then traverse
            cypher_episodic = (
                """
                MATCH (origin:Episodic {uuid: $origin_uuid})-[:MENTIONS]->(start:Entity)-[:RELATES_TO]->(e:RelatesToNode_)-[:RELATES_TO]->(m:Entity)
                MATCH (n:Entity)-[:RELATES_TO]->(e)
                """
                + filter_query
                + """
                RETURN DISTINCT
                """
                + get_entity_edge_return_query(GraphProvider.KUZU)
                + """
                LIMIT $limit
                """
            )
            records, _, _ = await executor.execute_query(
                cypher_episodic,
                origin_uuid=origin_uuid,
                limit=limit,
                **filter_params,
            )
            all_records.extend(records)
        # Deduplicate by uuid and limit
        seen: set[str] = set()
        unique_edges: list[EntityEdge] = []
        for r in all_records:
            edge = parse_kuzu_entity_edge(r)
            if edge.uuid not in seen:
                seen.add(edge.uuid)
                unique_edges.append(edge)
                if len(unique_edges) >= limit:
                    break
        return unique_edges

    # --- Episode search ---
    async def episode_fulltext_search(
        self,
        executor: QueryExecutor,
        query: str,
        search_filter: SearchFilters,  # noqa: ARG002
        group_ids: list[str] | None = None,
        limit: int = 10,
    ) -> list[EpisodicNode]:
        """Fulltext search over episode content; search_filter is unused here."""
        fuzzy_query = _build_kuzu_fulltext_query(query, group_ids)
        if fuzzy_query == '':
            return []
        filter_params: dict[str, Any] = {}
        group_filter_query = ''
        if group_ids is not None:
            group_filter_query += '\nAND e.group_id IN $group_ids'
            filter_params['group_ids'] = group_ids
        cypher = (
            get_nodes_query('episode_content', '$query', limit=limit, provider=GraphProvider.KUZU)
            + """
            WITH node AS episode, score
            MATCH (e:Episodic)
            WHERE e.uuid = episode.uuid
            """
            + group_filter_query
            + """
            RETURN
            """
            + EPISODIC_NODE_RETURN
            + """
            ORDER BY score DESC
            LIMIT $limit
            """
        )
        records, _, _ = await executor.execute_query(
            cypher, query=fuzzy_query, limit=limit, **filter_params
        )
        return [episodic_node_from_record(r) for r in records]

    # --- Community search ---
    async def community_fulltext_search(
        self,
        executor: QueryExecutor,
        query: str,
        group_ids: list[str] | None = None,
        limit: int = 10,
    ) -> list[CommunityNode]:
        """Fulltext search over community names, best score first."""
        fuzzy_query = _build_kuzu_fulltext_query(query, group_ids)
        if fuzzy_query == '':
            return []
        filter_params: dict[str, Any] = {}
        group_filter_query = ''
        if group_ids is not None:
            group_filter_query = 'WHERE c.group_id IN $group_ids'
            filter_params['group_ids'] = group_ids
        cypher = (
            get_nodes_query('community_name', '$query', limit=limit, provider=GraphProvider.KUZU)
            + """
            WITH node AS c, score
            WITH c, score
            """
            + group_filter_query
            + """
            RETURN
            """
            + COMMUNITY_NODE_RETURN
            + """
            ORDER BY score DESC
            LIMIT $limit
            """
        )
        records, _, _ = await executor.execute_query(
            cypher, query=fuzzy_query, limit=limit, **filter_params
        )
        return [community_node_from_record(r) for r in records]

    async def community_similarity_search(
        self,
        executor: QueryExecutor,
        search_vector: list[float],
        group_ids: list[str] | None = None,
        limit: int = 10,
        min_score: float = 0.6,
    ) -> list[CommunityNode]:
        """Cosine-similarity search over community name embeddings."""
        query_params: dict[str, Any] = {}
        group_filter_query = ''
        if group_ids is not None:
            group_filter_query += ' WHERE c.group_id IN $group_ids'
            query_params['group_ids'] = group_ids
        # Kuzu requires the parameter to be cast to a fixed-size FLOAT array.
        search_vector_var = f'CAST($search_vector AS FLOAT[{len(search_vector)}])'
        cypher = (
            'MATCH (c:Community)'
            + group_filter_query
            + """
            WITH c,
            """
            + get_vector_cosine_func_query(
                'c.name_embedding', search_vector_var, GraphProvider.KUZU
            )
            + """ AS score
            WHERE score > $min_score
            RETURN
            """
            + COMMUNITY_NODE_RETURN
            + """
            ORDER BY score DESC
            LIMIT $limit
            """
        )
        records, _, _ = await executor.execute_query(
            cypher,
            search_vector=search_vector,
            limit=limit,
            min_score=min_score,
            **query_params,
        )
        return [community_node_from_record(r) for r in records]

    # --- Rerankers ---
    async def node_distance_reranker(
        self,
        executor: QueryExecutor,
        node_uuids: list[str],
        center_node_uuid: str,
        min_score: float = 0,
    ) -> list[EntityNode]:
        """Rerank *node_uuids* by graph distance from *center_node_uuid*.

        Direct neighbors score 1, unreachable nodes score infinity; the
        final filter keeps uuids with 1/score >= min_score.
        """
        filtered_uuids = [u for u in node_uuids if u != center_node_uuid]
        scores: dict[str, float] = {center_node_uuid: 0.0}
        # Kuzu does not support UNWIND, so query each UUID individually
        cypher = """
            MATCH (center:Entity {uuid: $center_uuid})-[:RELATES_TO]->(:RelatesToNode_)-[:RELATES_TO]-(n:Entity {uuid: $node_uuid})
            RETURN 1 AS score, n.uuid AS uuid
        """
        for node_uuid in filtered_uuids:
            results, _, _ = await executor.execute_query(
                cypher,
                node_uuid=node_uuid,
                center_uuid=center_node_uuid,
            )
            for result in results:
                scores[result['uuid']] = result['score']
        for uuid in filtered_uuids:
            if uuid not in scores:
                scores[uuid] = float('inf')
        filtered_uuids.sort(key=lambda cur_uuid: scores[cur_uuid])
        if center_node_uuid in node_uuids:
            # Give the center a small nonzero score so 1/score below is defined.
            scores[center_node_uuid] = 0.1
            filtered_uuids = [center_node_uuid] + filtered_uuids
        reranked_uuids = [u for u in filtered_uuids if (1 / scores[u]) >= min_score]
        if not reranked_uuids:
            return []
        # Fetch the actual EntityNode objects
        get_query = """
            MATCH (n:Entity)
            WHERE n.uuid IN $uuids
            RETURN
        """ + get_entity_node_return_query(GraphProvider.KUZU)
        records, _, _ = await executor.execute_query(get_query, uuids=reranked_uuids)
        node_map = {r['uuid']: parse_kuzu_entity_node(r) for r in records}
        return [node_map[u] for u in reranked_uuids if u in node_map]

    async def episode_mentions_reranker(
        self,
        executor: QueryExecutor,
        node_uuids: list[str],
        min_score: float = 0,
    ) -> list[EntityNode]:
        """Rerank *node_uuids* by how many episodes mention each entity.

        NOTE(review): uuids with no MENTIONS rows score infinity and sort
        last but still pass the >= min_score filter — presumably intended.
        """
        if not node_uuids:
            return []
        scores: dict[str, float] = {}
        # Kuzu does not support UNWIND, so query each UUID individually
        cypher = """
            MATCH (episode:Episodic)-[r:MENTIONS]->(n:Entity {uuid: $node_uuid})
            RETURN count(*) AS score, n.uuid AS uuid
        """
        for node_uuid in node_uuids:
            results, _, _ = await executor.execute_query(
                cypher,
                node_uuid=node_uuid,
            )
            for result in results:
                scores[result['uuid']] = result['score']
        for uuid in node_uuids:
            if uuid not in scores:
                scores[uuid] = float('inf')
        sorted_uuids = list(node_uuids)
        sorted_uuids.sort(key=lambda cur_uuid: scores[cur_uuid])
        reranked_uuids = [u for u in sorted_uuids if scores[u] >= min_score]
        if not reranked_uuids:
            return []
        # Fetch the actual EntityNode objects
        get_query = """
            MATCH (n:Entity)
            WHERE n.uuid IN $uuids
            RETURN
        """ + get_entity_node_return_query(GraphProvider.KUZU)
        records, _, _ = await executor.execute_query(get_query, uuids=reranked_uuids)
        node_map = {r['uuid']: parse_kuzu_entity_node(r) for r in records}
        return [node_map[u] for u in reranked_uuids if u in node_map]

    # --- Filter builders ---
    def build_node_search_filters(self, search_filters: SearchFilters) -> Any:
        """Return Kuzu node filter clauses and their parameters as a dict."""
        filter_queries, filter_params = node_search_filter_query_constructor(
            search_filters, GraphProvider.KUZU
        )
        return {'filter_queries': filter_queries, 'filter_params': filter_params}

    def build_edge_search_filters(self, search_filters: SearchFilters) -> Any:
        """Return Kuzu edge filter clauses and their parameters as a dict."""
        filter_queries, filter_params = edge_search_filter_query_constructor(
            search_filters, GraphProvider.KUZU
        )
        return {'filter_queries': filter_queries, 'filter_params': filter_params}

    # --- Fulltext query builder ---
    def build_fulltext_query(
        self,
        query: str,
        group_ids: list[str] | None = None,
        max_query_length: int = 8000,
    ) -> str:
        """Public wrapper around _build_kuzu_fulltext_query (word-truncated raw query)."""
        return _build_kuzu_fulltext_query(query, group_ids, max_query_length)
| {
"repo_id": "getzep/graphiti",
"file_path": "graphiti_core/driver/kuzu/operations/search_ops.py",
"license": "Apache License 2.0",
"lines": 649,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
getzep/graphiti:graphiti_core/driver/neptune/operations/community_edge_ops.py | """
Copyright 2024, Zep Software, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import logging
from typing import Any
from graphiti_core.driver.driver import GraphProvider
from graphiti_core.driver.operations.community_edge_ops import CommunityEdgeOperations
from graphiti_core.driver.query_executor import QueryExecutor, Transaction
from graphiti_core.edges import CommunityEdge
from graphiti_core.errors import EdgeNotFoundError
from graphiti_core.helpers import parse_db_date
from graphiti_core.models.edges.edge_db_queries import (
COMMUNITY_EDGE_RETURN,
get_community_edge_save_query,
)
logger = logging.getLogger(__name__)
def _community_edge_from_record(record: Any) -> CommunityEdge:
    """Hydrate a CommunityEdge from a raw database record."""
    created_at = parse_db_date(record['created_at'])  # type: ignore[arg-type]
    return CommunityEdge(
        uuid=record['uuid'],
        group_id=record['group_id'],
        source_node_uuid=record['source_node_uuid'],
        target_node_uuid=record['target_node_uuid'],
        created_at=created_at,
    )
class NeptuneCommunityEdgeOperations(CommunityEdgeOperations):
    """Community-edge (HAS_MEMBER) CRUD operations for the Neptune provider.

    All write methods accept an optional open *tx*; when it is None the
    query executes directly through the given executor instead.
    """

    async def save(
        self,
        executor: QueryExecutor,
        edge: CommunityEdge,
        tx: Transaction | None = None,
    ) -> None:
        """Insert or update one community membership edge."""
        query = get_community_edge_save_query(GraphProvider.NEPTUNE)
        # The save query names its endpoints community/entity rather than
        # source/target.
        params: dict[str, Any] = {
            'community_uuid': edge.source_node_uuid,
            'entity_uuid': edge.target_node_uuid,
            'uuid': edge.uuid,
            'group_id': edge.group_id,
            'created_at': edge.created_at,
        }
        if tx is not None:
            await tx.run(query, **params)
        else:
            await executor.execute_query(query, **params)
        logger.debug(f'Saved Edge to Graph: {edge.uuid}')

    async def delete(
        self,
        executor: QueryExecutor,
        edge: CommunityEdge,
        tx: Transaction | None = None,
    ) -> None:
        """Delete the edge matching edge.uuid across all edge types."""
        query = """
            MATCH (n)-[e:MENTIONS|RELATES_TO|HAS_MEMBER {uuid: $uuid}]->(m)
            DELETE e
        """
        if tx is not None:
            await tx.run(query, uuid=edge.uuid)
        else:
            await executor.execute_query(query, uuid=edge.uuid)
        logger.debug(f'Deleted Edge: {edge.uuid}')

    async def delete_by_uuids(
        self,
        executor: QueryExecutor,
        uuids: list[str],
        tx: Transaction | None = None,
    ) -> None:
        """Delete every edge whose uuid appears in *uuids* (any edge type)."""
        query = """
            MATCH (n)-[e:MENTIONS|RELATES_TO|HAS_MEMBER]->(m)
            WHERE e.uuid IN $uuids
            DELETE e
        """
        if tx is not None:
            await tx.run(query, uuids=uuids)
        else:
            await executor.execute_query(query, uuids=uuids)

    async def get_by_uuid(
        self,
        executor: QueryExecutor,
        uuid: str,
    ) -> CommunityEdge:
        """Fetch one HAS_MEMBER edge by uuid; raises EdgeNotFoundError when absent."""
        query = (
            """
            MATCH (n:Community)-[e:HAS_MEMBER {uuid: $uuid}]->(m)
            RETURN
            """
            + COMMUNITY_EDGE_RETURN
        )
        records, _, _ = await executor.execute_query(query, uuid=uuid)
        edges = [_community_edge_from_record(r) for r in records]
        if len(edges) == 0:
            raise EdgeNotFoundError(uuid)
        return edges[0]

    async def get_by_uuids(
        self,
        executor: QueryExecutor,
        uuids: list[str],
    ) -> list[CommunityEdge]:
        """Fetch every HAS_MEMBER edge whose uuid appears in *uuids*."""
        query = (
            """
            MATCH (n:Community)-[e:HAS_MEMBER]->(m)
            WHERE e.uuid IN $uuids
            RETURN
            """
            + COMMUNITY_EDGE_RETURN
        )
        records, _, _ = await executor.execute_query(query, uuids=uuids)
        return [_community_edge_from_record(r) for r in records]

    async def get_by_group_ids(
        self,
        executor: QueryExecutor,
        group_ids: list[str],
        limit: int | None = None,
        uuid_cursor: str | None = None,
    ) -> list[CommunityEdge]:
        """Page through HAS_MEMBER edges for *group_ids*, uuid-descending.

        *uuid_cursor* continues a previous page (uuids strictly below the
        cursor); *limit* caps the page size when given.
        """
        cursor_clause = 'AND e.uuid < $uuid' if uuid_cursor else ''
        limit_clause = 'LIMIT $limit' if limit is not None else ''
        query = (
            """
            MATCH (n:Community)-[e:HAS_MEMBER]->(m)
            WHERE e.group_id IN $group_ids
            """
            + cursor_clause
            + """
            RETURN
            """
            + COMMUNITY_EDGE_RETURN
            + """
            ORDER BY e.uuid DESC
            """
            + limit_clause
        )
        # $uuid/$limit may be None when their clauses are absent; the query
        # then never references them.
        records, _, _ = await executor.execute_query(
            query,
            group_ids=group_ids,
            uuid=uuid_cursor,
            limit=limit,
        )
        return [_community_edge_from_record(r) for r in records]
| {
"repo_id": "getzep/graphiti",
"file_path": "graphiti_core/driver/neptune/operations/community_edge_ops.py",
"license": "Apache License 2.0",
"lines": 146,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
getzep/graphiti:graphiti_core/driver/neptune/operations/community_node_ops.py | """
Copyright 2024, Zep Software, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import annotations
import logging
from typing import TYPE_CHECKING, Any
from graphiti_core.driver.driver import GraphProvider
from graphiti_core.driver.operations.community_node_ops import CommunityNodeOperations
from graphiti_core.driver.query_executor import QueryExecutor, Transaction
from graphiti_core.driver.record_parsers import community_node_from_record
from graphiti_core.errors import NodeNotFoundError
from graphiti_core.models.nodes.node_db_queries import (
COMMUNITY_NODE_RETURN_NEPTUNE,
get_community_node_save_query,
)
from graphiti_core.nodes import CommunityNode
if TYPE_CHECKING:
from graphiti_core.driver.neptune_driver import NeptuneDriver
logger = logging.getLogger(__name__)
class NeptuneCommunityNodeOperations(CommunityNodeOperations):
    def __init__(self, driver: NeptuneDriver | None = None):
        # Optional driver handle; when present, save() mirrors names into AOSS.
        self._driver = driver
async def save(
self,
executor: QueryExecutor,
node: CommunityNode,
tx: Transaction | None = None,
) -> None:
query = get_community_node_save_query(GraphProvider.NEPTUNE)
params: dict[str, Any] = {
'uuid': node.uuid,
'name': node.name,
'group_id': node.group_id,
'summary': node.summary,
'name_embedding': node.name_embedding,
'created_at': node.created_at,
}
if tx is not None:
await tx.run(query, **params)
else:
await executor.execute_query(query, **params)
if self._driver is not None:
self._driver.save_to_aoss(
'community_name',
[{'uuid': node.uuid, 'name': node.name, 'group_id': node.group_id}],
)
logger.debug(f'Saved Community Node to Graph: {node.uuid}')
async def save_bulk(
self,
executor: QueryExecutor,
nodes: list[CommunityNode],
tx: Transaction | None = None,
batch_size: int = 100,
) -> None:
# Community nodes saved individually since bulk query not in existing codebase
for node in nodes:
await self.save(executor, node, tx=tx)
async def delete(
self,
executor: QueryExecutor,
node: CommunityNode,
tx: Transaction | None = None,
) -> None:
query = """
MATCH (n {uuid: $uuid})
WHERE n:Entity OR n:Episodic OR n:Community
DETACH DELETE n
"""
if tx is not None:
await tx.run(query, uuid=node.uuid)
else:
await executor.execute_query(query, uuid=node.uuid)
logger.debug(f'Deleted Node: {node.uuid}')
async def delete_by_group_id(
self,
executor: QueryExecutor,
group_id: str,
tx: Transaction | None = None,
batch_size: int = 100,
) -> None:
query = """
MATCH (n:Community {group_id: $group_id})
DETACH DELETE n
"""
if tx is not None:
await tx.run(query, group_id=group_id)
else:
await executor.execute_query(query, group_id=group_id)
async def delete_by_uuids(
self,
executor: QueryExecutor,
uuids: list[str],
tx: Transaction | None = None,
batch_size: int = 100,
) -> None:
query = """
MATCH (n:Community)
WHERE n.uuid IN $uuids
DETACH DELETE n
"""
if tx is not None:
await tx.run(query, uuids=uuids)
else:
await executor.execute_query(query, uuids=uuids)
async def get_by_uuid(
self,
executor: QueryExecutor,
uuid: str,
) -> CommunityNode:
query = (
"""
MATCH (n:Community {uuid: $uuid})
RETURN
"""
+ COMMUNITY_NODE_RETURN_NEPTUNE
)
records, _, _ = await executor.execute_query(query, uuid=uuid)
nodes = [community_node_from_record(r) for r in records]
if len(nodes) == 0:
raise NodeNotFoundError(uuid)
return nodes[0]
async def get_by_uuids(
self,
executor: QueryExecutor,
uuids: list[str],
) -> list[CommunityNode]:
query = (
"""
MATCH (n:Community)
WHERE n.uuid IN $uuids
RETURN
"""
+ COMMUNITY_NODE_RETURN_NEPTUNE
)
records, _, _ = await executor.execute_query(query, uuids=uuids)
return [community_node_from_record(r) for r in records]
async def get_by_group_ids(
self,
executor: QueryExecutor,
group_ids: list[str],
limit: int | None = None,
uuid_cursor: str | None = None,
) -> list[CommunityNode]:
cursor_clause = 'AND n.uuid < $uuid' if uuid_cursor else ''
limit_clause = 'LIMIT $limit' if limit is not None else ''
query = (
"""
MATCH (n:Community)
WHERE n.group_id IN $group_ids
"""
+ cursor_clause
+ """
RETURN
"""
+ COMMUNITY_NODE_RETURN_NEPTUNE
+ """
ORDER BY n.uuid DESC
"""
+ limit_clause
)
records, _, _ = await executor.execute_query(
query,
group_ids=group_ids,
uuid=uuid_cursor,
limit=limit,
)
return [community_node_from_record(r) for r in records]
async def load_name_embedding(
self,
executor: QueryExecutor,
node: CommunityNode,
) -> None:
query = """
MATCH (n:Community {uuid: $uuid})
RETURN [x IN split(n.name_embedding, ",") | toFloat(x)] AS name_embedding
"""
records, _, _ = await executor.execute_query(query, uuid=node.uuid)
if len(records) == 0:
raise NodeNotFoundError(node.uuid)
node.name_embedding = records[0]['name_embedding']
| {
"repo_id": "getzep/graphiti",
"file_path": "graphiti_core/driver/neptune/operations/community_node_ops.py",
"license": "Apache License 2.0",
"lines": 189,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.