code stringlengths 17 6.64M |
|---|
def main():
parser = argparse.ArgumentParser(description='Convert keys in official pretrained segformer to MMSegmentation style.')
parser.add_argument('src', help='src model path or url')
parser.add_argument('dst', help='save path')
args = parser.parse_args()
checkpoint = CheckpointLoader.load_che... |
def convert_vit(ckpt):
new_ckpt = OrderedDict()
for (k, v) in ckpt.items():
if k.startswith('head'):
continue
if k.startswith('norm'):
new_k = k.replace('norm.', 'ln1.')
elif k.startswith('patch_embed'):
if ('proj' in k):
new_k = k.re... |
def main():
parser = argparse.ArgumentParser(description='Convert keys in timm pretrained vit models to MMSegmentation style.')
parser.add_argument('src', help='src model path or url')
parser.add_argument('dst', help='save path')
args = parser.parse_args()
checkpoint = CheckpointLoader.load_checkp... |
def parse_args():
parser = argparse.ArgumentParser(description='Print the whole config')
parser.add_argument('config', help='config file path')
parser.add_argument('--graph', action='store_true', help='print the models graph')
parser.add_argument('--options', nargs='+', action=DictAction, help='argume... |
def main():
args = parse_args()
cfg = Config.fromfile(args.config)
if (args.options is not None):
cfg.merge_from_dict(args.options)
print(f'''Config:
{cfg.pretty_text}''')
cfg.dump('example.py')
if args.graph:
model = init_segmentor(args.config, device='cpu')
print(f'''... |
def parse_args():
    """Parse command-line arguments for the checkpoint-publishing script.

    Returns:
        argparse.Namespace with `in_file` and `out_file` attributes.
    """
    parser = argparse.ArgumentParser(description='Process a checkpoint to be published')
    parser.add_argument('in_file', help='input checkpoint filename')
    parser.add_argument('out_file', help='output checkpoint filename')
    return parser.parse_args()
|
def process_checkpoint(in_file, out_file):
checkpoint = torch.load(in_file, map_location='cpu')
if ('optimizer' in checkpoint):
del checkpoint['optimizer']
torch.save(checkpoint, out_file)
sha = subprocess.check_output(['sha256sum', out_file]).decode()
final_file = (out_file.rstrip('.pth')... |
def main():
    """Entry point: read CLI arguments and publish the processed checkpoint."""
    cli_args = parse_args()
    process_checkpoint(cli_args.in_file, cli_args.out_file)
|
def digit_version(version_str):
digit_version = []
for x in version_str.split('.'):
if x.isdigit():
digit_version.append(int(x))
elif (x.find('rc') != (- 1)):
patch_version = x.split('rc')
digit_version.append((int(patch_version[0]) - 1))
digit_v... |
def check_torch_version():
    """Abort if the installed PyTorch is older than the minimum needed for TorchScript export."""
    torch_minimum_version = '1.8.0'
    current = digit_version(torch.__version__)
    required = digit_version(torch_minimum_version)
    # NOTE(review): `assert` is stripped under `python -O`; kept as-is to preserve behavior.
    assert current >= required, f'Torch=={torch.__version__} is not support for converting to torchscript. Please install pytorch>={torch_minimum_version}.'
|
def _convert_batchnorm(module):
module_output = module
if isinstance(module, torch.nn.SyncBatchNorm):
module_output = torch.nn.BatchNorm2d(module.num_features, module.eps, module.momentum, module.affine, module.track_running_stats)
if module.affine:
module_output.weight.data = modu... |
def _demo_mm_inputs(input_shape, num_classes):
'Create a superset of inputs needed to run test or train batches.\n\n Args:\n input_shape (tuple):\n input batch dimensions\n num_classes (int):\n number of semantic classes\n '
(N, C, H, W) = input_shape
rng = np.ran... |
def pytorch2libtorch(model, input_shape, show=False, output_file='tmp.pt', verify=False):
'Export Pytorch model to TorchScript model and verify the outputs are\n same between Pytorch and TorchScript.\n\n Args:\n model (nn.Module): Pytorch model we want to export.\n input_shape (tuple): Use thi... |
def parse_args():
parser = argparse.ArgumentParser(description='Convert MMSeg to TorchScript')
parser.add_argument('config', help='test config file path')
parser.add_argument('--checkpoint', help='checkpoint file', default=None)
parser.add_argument('--show', action='store_true', help='show TorchScript... |
def parse_args():
parser = argparse.ArgumentParser(description='mmseg test (and eval) a model')
parser.add_argument('config', help='test config file path')
parser.add_argument('checkpoint', help='checkpoint file')
parser.add_argument('--work-dir', help='if specified, the evaluation metric results will... |
def main():
args = parse_args()
assert (args.out or args.eval or args.format_only or args.show or args.show_dir), 'Please specify at least one operation (save/eval/format/show the results / save the results) with the argument "--out", "--eval", "--format-only", "--show" or "--show-dir"'
if (args.eval and ... |
def mmseg2torchserve(config_file: str, checkpoint_file: str, output_folder: str, model_name: str, model_version: str='1.0', force: bool=False):
"Converts mmsegmentation model (config + checkpoint) to TorchServe\n `.mar`.\n\n Args:\n config_file:\n In MMSegmentation config format.\n ... |
def parse_args():
parser = ArgumentParser(description='Convert mmseg models to TorchServe `.mar` format.')
parser.add_argument('config', type=str, help='config file path')
parser.add_argument('checkpoint', type=str, help='checkpoint file path')
parser.add_argument('--output-folder', type=str, required... |
class MMsegHandler(BaseHandler):
def initialize(self, context):
properties = context.system_properties
self.map_location = ('cuda' if torch.cuda.is_available() else 'cpu')
self.device = torch.device((((self.map_location + ':') + str(properties.get('gpu_id'))) if torch.cuda.is_available() ... |
def parse_args():
parser = ArgumentParser(description='Compare result of torchserve and pytorch,and visualize them.')
parser.add_argument('img', help='Image file')
parser.add_argument('config', help='Config file')
parser.add_argument('checkpoint', help='Checkpoint file')
parser.add_argument('model... |
def main(args):
url = ((('http://' + args.inference_addr) + '/predictions/') + args.model_name)
with open(args.img, 'rb') as image:
tmp_res = requests.post(url, image)
content = tmp_res.content
if args.result_image:
with open(args.result_image, 'wb') as out_image:
out_image... |
def parse_args():
parser = argparse.ArgumentParser(description='Train a segmentor')
parser.add_argument('config', help='train config file path')
parser.add_argument('--work-dir', help='the dir to save logs and models')
parser.add_argument('--load-from', help='the checkpoint file to load weights from')... |
def main():
args = parse_args()
cfg = Config.fromfile(args.config)
if (args.options is not None):
cfg.merge_from_dict(args.options)
if cfg.get('cudnn_benchmark', False):
torch.backends.cudnn.benchmark = True
if (args.work_dir is not None):
cfg.work_dir = args.work_dir
e... |
def parse_args():
parser = argparse.ArgumentParser(description='Train a segmentor')
parser.add_argument('config', help='train config file path')
parser.add_argument('--work-dir', help='the dir to save logs and models')
parser.add_argument('--load-from', help='the checkpoint file to load weights from')... |
def main():
args = parse_args()
cfg = Config.fromfile(args.config)
if (args.options is not None):
cfg.merge_from_dict(args.options)
if cfg.get('cudnn_benchmark', False):
torch.backends.cudnn.benchmark = True
if (args.work_dir is not None):
cfg.work_dir = args.work_dir
e... |
def _parse_args():
(args_config, remaining) = config_parser.parse_known_args()
if args_config.config:
with open(args_config.config, 'r') as f:
cfg = yaml.safe_load(f)
parser.set_defaults(**cfg)
args = parser.parse_args(remaining)
args_text = yaml.safe_dump(args.__dict__... |
def main():
setup_default_logging()
(args, args_text) = _parse_args()
if args.log_wandb:
if has_wandb:
wandb.init(project=args.experiment, config=args)
else:
_logger.warning("You've requested to log metrics to wandb but package not found. Metrics not being logged to... |
def train_one_epoch(epoch, model, loader, optimizer, loss_fn, args, lr_scheduler=None, saver=None, output_dir=None, amp_autocast=suppress, loss_scaler=None, model_ema=None, mixup_fn=None):
if (args.mixup_off_epoch and (epoch >= args.mixup_off_epoch)):
if (args.prefetcher and loader.mixup_enabled):
... |
def validate(model, loader, loss_fn, args, amp_autocast=suppress, log_suffix=''):
batch_time_m = AverageMeter()
losses_m = AverageMeter()
top1_m = AverageMeter()
top5_m = AverageMeter()
model.eval()
end = time.time()
last_idx = (len(loader) - 1)
with torch.no_grad():
for (batch... |
def get_command_args():
parser = argparse.ArgumentParser()
parser.add_argument('--dataname', '-n', help='name of generated', default='sewfactory')
parser.add_argument('--config', '-c', help='config file for dataset resource', default='meta_infos\\configs\\data_sim_configs.json')
parser.add_argument('-... |
def init_mayapy():
    """Start Maya standalone and load the plugins used for rendering and export."""
    try:
        print('Initilializing Maya tools...')
        maya.standalone.initialize()
        print('Load plugins')
        # Load each required plugin in the original order.
        for plugin in ('mtoa.mll', 'objExport.mll', 'fbxmaya.mll'):
            cmds.loadPlugin(plugin)
    except Exception as e:
        # Best-effort init: report the error and continue.
        print(e)
|
def stop_mayapy():
    """Shut down the Maya standalone session and report it."""
    maya.standalone.uninitialize()
    print('Maya stopped')
|
def get_command_args():
parser = argparse.ArgumentParser()
parser.add_argument('--dataname', '-n', help='name of generated', default='deepfashion')
parser.add_argument('--config', '-c', help='config file for dataset resource', default='meta_infos\\configs\\data_sim_configs.json')
parser.add_argument('... |
def init_mayapy():
    """Start Maya standalone and load the plugins used for rendering and export."""
    try:
        print('Initilializing Maya tools...')
        maya.standalone.initialize()
        print('Load plugins')
        # Load each required plugin in the original order.
        for plugin in ('mtoa.mll', 'objExport.mll', 'fbxmaya.mll'):
            cmds.loadPlugin(plugin)
    except Exception as e:
        # Best-effort init: report the error and continue.
        print(e)
|
def stop_mayapy():
    """Shut down the Maya standalone session and report it."""
    maya.standalone.uninitialize()
    print('Maya stopped')
|
def get_command_args():
'command line arguments to control the run'
parser = argparse.ArgumentParser()
parser.add_argument('--base-config', '-c', help='template config with parameters used for animation', default='meta_infos\\configs\\anime_config.json')
parser.add_argument('--base-fbx', '-f', help='i... |
def _create_data_folder(path, props):
' Create a new directory to put dataset in \n & generate appropriate name & update dataset properties\n '
if ('data_folder' in props):
props['name'] = (props['data_folder'] + '_regen')
data_folder = props['name']
else:
data_folder = P... |
def generate(path, templates_path, props):
'Generates a synthetic dataset of patterns with given properties\n Params:\n path : path to folder to put a new dataset into\n templates_path : path to folder with pattern templates\n props : an instance of DatasetProperties class\... |
def get_command_args():
'command line arguments to control the run'
parser = argparse.ArgumentParser()
parser.add_argument('--config', '-c', help='pattern config', type=str, default='meta_infos/configs/dataset_config.yaml')
parser.add_argument('--out', '-o', help='folder to save generated patterns', t... |
class Properties():
'Keeps, loads, and saves cofiguration & statistic information\n Supports gets&sets as a dictionary\n Provides shortcuts for batch-init configurations\n\n One of the usages -- store system-dependent basic cofiguration\n '
def __init__(self, filename='', clean_stats=... |
class Garment(object):
def __init__(self, name, type):
self.name = name
self.type = type
def to_filter_string(self):
return ((self.type + '/') + self.name)
def to_rel_folder(self):
return os.path.join(self.type, self.name)
def to_abs_path(self, data_root):
r... |
class GarmentMaterials(object):
'\n Describes the materials for rendering.\n mtl_scene: a scene file contains all the supportted mtls\n\n Note: support 4 kinds of mtls now (default, cotton, velvet, silk).\n Need input mtl resources first. \n Assumes: the code is based ... |
class MayaScene(object):
'\n Decribes scene setup that includes:\n # Mtl(s) & light(s): preload, donot move\n * floor & camera(s): preload, ajdust according to the body\n Assumes \n * body the scene revolved aroung faces z+ direction\n '
def __init__(self, pr... |
class GarmentPlayblast(object):
'\n 1. load scene\n 2. load smplbody \n 3. load garment\n 4. run simulation\n 5. run render\n '
def __init__(self, conf):
self.config = conf
self.default_center = [0.037, (- 29.154), 2.363]
self.smooth_cameras = {}
... |
class PredictPlayblast(GarmentPlayblast):
def __init__(self, conf):
super(PredictPlayblast, self).__init__(conf)
self.set_panel_render_camera()
def set_panel_render_camera(self):
self.panel_cameras = {}
(cam, camshape) = cmds.camera(aspectRatio=1, name='panel_front')
... |
def load_plugin():
'\n Forces loading Qualoth plugin into Maya. \n Note that plugin should be installed and licensed to use it!\n Inquire here: http://www.fxgear.net/vfxpricing\n '
maya_year = int(mel.eval('getApplicationVersionAsFloat'))
plugin_name = (('qualoth_' + str(maya_year)... |
def qlCreatePattern(curves_group):
'\n Converts given 2D closed curve to a flat geometry piece\n '
objects_before = cmds.ls(assemblies=True)
cmds.select(curves_group)
mel.eval('qlCreatePattern()')
objects_after = cmds.ls(assemblies=True)
patterns = list((set(objects_after) - set(obje... |
def qlCreateSeam(curve1, curve2):
    """Create a Qualoth seam between the two given curves and return its shape node."""
    # The MEL command operates on the current selection.
    cmds.select([curve1, curve2])
    return mel.eval('qlCreateSeam()')
|
def qlCreateCollider(cloth, target):
"\n Marks object as a collider object for cloth --\n eshures that cloth won't penetrate body when simulated\n "
objects_before = cmds.ls(assemblies=True)
cmds.select([cloth, target])
mel.eval('qlCreateCollider()')
objects_after = cmds.ls(assemb... |
def qlCreateAttachConstraint(points, target):
objects_before = cmds.ls(assemblies=True)
if (not isinstance(target, list)):
cmds.select((points + [target]))
else:
cmds.select((points + target))
mel.eval('qlCreateAttachConstraint()')
objects_after = cmds.ls(assemblies=True)
const... |
def qlCleanCache(cloth):
    """Clear the simulation cache of the given cloth. Accepts a qlCloth object."""
    # qlClearCache() acts on the current selection.
    cmds.select(cloth)
    mel.eval('qlClearCache()')
|
def qlReinitSolver(cloth, solver):
    """Reinitialize the solver.

    Resets both cloth and solver to the state they had before simulation.
    NOTE: useful for a correct reload of garments on delete.
    """
    cmds.select([cloth, solver])
    mel.eval('qlReinitializeSolver()')
|
def start_maya_sim(garment, props):
'Start simulation through Maya defalut playback without checks\n Gives Maya user default control over stopping & resuming sim\n Current qlCloth material properties from Maya are used (instead of garment config)\n '
config = props['config']
solver = _ini... |
def run_sim(garment, props):
'\n Setup and run cloth simulator untill static equlibrium is achieved.\n Note:\n * Assumes garment is already properly aligned!\n * All of the garments existing in Maya scene will be simulated\n because solver is shared!!\n '
... |
def findSolver():
    """Return the name of the qlSolver shape in the scene, or None if absent.

    Usually a single solver is created per scene.
    """
    matches = cmds.ls('*qlSolver*Shape*')
    if matches:
        return matches[0]
    return None
|
def deleteSolver():
    """Delete every Qualoth solver object from the scene."""
    solver_nodes = cmds.ls('*qlSolver*')
    cmds.delete(solver_nodes)
|
def flipPanelNormal(panel_geom):
'Set flippling normals to True for a given panel geom objects\n at least one of the provided objects should a qlPattern object'
ql_pattern = [obj for obj in panel_geom if ('Pattern' in obj)]
ql_pattern = ql_pattern[0]
shape = cmds.listRelatives(ql_pattern, shape... |
def getVertsOnCurve(panel_node, curve, curve_group=None):
"\n Return the list of mesh vertices located on the curve \n * panel_node is qlPattern object to which the curve belongs\n * curve is a main name of a curve object to get vertex info for\n OR any substring of it's full Maya ... |
def setColliderFriction(collider_objects, friction_value):
'Sets the level of friction of the given collider to friction_value'
main_collider = [obj for obj in collider_objects if ('Offset' not in obj)]
collider_shape = cmds.listRelatives(main_collider[0], shapes=True)
cmds.setAttr((collider_shape[0] ... |
def setFabricProps(cloth, props):
'Set given material propertied to qlClothObject'
if (not props):
return
cmds.setAttr((cloth + '.density'), props['density'], clamp=True)
cmds.setAttr((cloth + '.stretch'), props['stretch_resistance'], clamp=True)
cmds.setAttr((cloth + '.shear'), props['she... |
def setPanelsResolution(scaling):
    """Set the resolution-scale attribute on every qlPattern shape in the scene."""
    for panel_shape in cmds.ls('*qlPattern*', shapes=True):
        cmds.setAttr(panel_shape + '.resolutionScale', scaling)
|
def fetchFabricProps(cloth):
"Returns current material properties of the cloth's objects\n Requires qlCloth object\n "
props = {}
props['density'] = cmds.getAttr((cloth + '.density'))
props['stretch_resistance'] = cmds.getAttr((cloth + '.stretch'))
props['shear_resistance'] = cmds.getAtt... |
def fetchColliderFriction(collider_objects):
'Retrieve collider friction info from given collider'
try:
main_collider = [obj for obj in collider_objects if ('Offset' not in obj)]
collider_shape = cmds.listRelatives(main_collider[0], shapes=True)
return cmds.getAttr((collider_shape[0] +... |
def fetchPanelResolution():
    """Read the resolution scale from the first qlPattern found in the scene."""
    panels = cmds.ls('*qlPattern*')
    panel_shape = cmds.listRelatives(panels[0], shapes=True, path=True)[0]
    return cmds.getAttr(panel_shape + '.resolutionScale')
|
def _init_sim(config):
'\n Basic simulation settings before starting simulation\n '
solver = findSolver()
cmds.setAttr((solver + '.selfCollision'), 1)
cmds.setAttr((solver + '.startTime'), 1)
cmds.setAttr((solver + '.solverStatistics'), 0)
cmds.playbackOptions(ps=0, max=config['max_s... |
def _set_gravity(solver, gravity):
    """Assign the given gravity value to the sim solver's 'gravity1' attribute."""
    cmds.setAttr(solver + '.gravity1', gravity)
|
def _update_progress(progress, total):
    """Render a one-line console progress bar for progress/total."""
    fraction = progress / total
    filled = int(fraction * 50)
    bar = ('#' * filled) + ('-' * (50 - filled))
    # '\r' rewrites the same console line on every call.
    sys.stdout.write('\rProgress: [{0:50s}] {1:.1f}%'.format(bar, fraction * 100))
    sys.stdout.flush()
|
def _record_fail(props, fail_type, garment_name):
"add a failure recording to props. Creates nodes if don't exist"
if ('fails' not in props['stats']):
props['stats']['fails'] = {}
try:
props['stats']['fails'][fail_type].append(garment_name)
except KeyError:
props['stats']['fail... |
def single_file_sim(resources, props, caching=False):
'\n Simulates the given template and puts the results in original template folder, \n including config and statistics\n '
try:
init_sim_props(props, True)
qw.load_plugin()
scene = mymaya.Scene(os.path.join(resources... |
def batch_sim(resources, data_path, dataset_props, num_samples=None, caching=False, force_restart=False):
'\n Performs pattern simulation for each example in the dataset \n given by dataset_props. \n Batch processing is automatically resumed \n from the last unporcessed datapoint if re... |
def batch_sim_with_mtls(resources, data_path, dataset_props, mtls=None, num_samples=None, caching=False, force_restart=False):
if (('frozen' in dataset_props) and dataset_props['frozen']):
print('Warning: dataset is frozen, processing is skipped')
return True
resume = init_sim_props(dataset_pr... |
def init_sim_props(props, batch_run=False, force_restart=False):
' \n Add default config values if not given in props & clean-up stats if not resuming previous processing\n Returns a flag wheter current simulation is a resumed last one\n '
if ('sim' not in props):
props.set_section_co... |
def template_simulation(spec, scene, sim_props, delete_on_clean=False, caching=False, save_maya_scene=False):
'\n Simulate given template within given scene & save log files\n '
print('\nGarment load')
garment = mymaya.MayaGarment(spec)
try:
garment.load(shader_group=scene.cloth_SG()... |
def template_simulation_with_mtls(spec, scene, sim_props, delete_on_clean=False, caching=False, save_maya_scene=False):
shd_names = list(scene.Mtls.material_types)
num_body = 1
sim_names = list(sim_props.keys())
names = list(set(shd_names).intersection(sim_names))
for name in names:
print(... |
def _serialize_props_with_sim_stats(dataset_props, filename):
    """Finalize processing statistics on dataset_props and write it to filename."""
    # Summarize stats first so the serialized file contains the final numbers.
    dataset_props.stats_summary()
    dataset_props.serialize(filename)
|
def _get_pattern_files(data_path, dataset_props):
' Collects paths to all the pattern files in given folder'
to_ignore = ['renders']
pattern_specs = []
(root, dirs, files) = next(os.walk(data_path))
if dataset_props['to_subfolders']:
for directory in dirs:
if (directory not in ... |
class NumpyArrayEncoder(JSONEncoder):
    """JSON encoder that serializes numpy arrays as (nested) plain lists."""

    def default(self, obj):
        # Convert ndarrays; defer everything else to the base class,
        # which raises TypeError for unsupported types.
        if not isinstance(obj, np.ndarray):
            return JSONEncoder.default(self, obj)
        return obj.tolist()
|
def load_file(filepath, name='object'):
    """Import a mesh file into the Maya scene and return the renamed root node.

    Raises:
        RuntimeError: if filepath does not exist.
    """
    if not os.path.isfile(filepath):
        raise RuntimeError('Loading Object from file to Maya::Missing file {}'.format(filepath))
    # rnn=True makes cmds.file return the newly created nodes; the first is the root.
    imported_nodes = cmds.file(filepath, i=True, rnn=True)
    # '#' lets Maya append a unique numeric suffix to the name.
    return cmds.rename(imported_nodes[0], name + '#')
|
def save_mesh(target, to_file):
'Save given object to file as a mesh'
cmds.select(clear=True)
cmds.select(target)
cmds.file(to_file, type='OBJExport', exportSelectedStrict=True, options='groups=0;ptgroups=0;materials=0;smoothing=0;normals=1', force=True, defaultExtensions=False)
cmds.select(clear=... |
def get_dag(object_name):
    """Return the DAG path for the scene object with the given name."""
    sel = OpenMaya.MSelectionList()
    sel.add(object_name)
    dag_path = OpenMaya.MDagPath()
    # Fill dag_path in place from the first (only) selection entry.
    sel.getDagPath(0, dag_path)
    return dag_path
|
def get_mesh_dag(object_name):
    """Return an (MFnMesh, MDagPath) pair for the named object."""
    dag_path = get_dag(object_name)
    return OpenMaya.MFnMesh(dag_path), dag_path
|
def get_vertices_np(mesh):
'\n Retreive vertex info as np array for given mesh object\n '
maya_vertices = OpenMaya.MPointArray()
mesh.getPoints(maya_vertices, OpenMaya.MSpace.kWorld)
vertices = np.empty((maya_vertices.length(), 3))
for i in range(maya_vertices.length()):
for j in... |
def match_vert_lists(short_list, long_list):
'\n Find the vertices from long list that correspond to verts in short_list\n Both lists are numpy arrays\n NOTE: Assuming order is matching => O(len(long_list)) complexity: \n order of vertices in short list is the same as in long list ... |
def test_ray_intersect(mesh, raySource, rayVector, accelerator=None, hit_tol=None, return_info=False):
'Check if given ray intersect given mesh\n * hit_tol ignores intersections that are within hit_tol from the ray source (as % of ray length) -- usefull when checking self-intersect\n * mesh is expec... |
def edge_vert_ids(mesh, edge_id):
'Return vertex ids for a given edge in given mesh'
script_util = OpenMaya.MScriptUtil(0.0)
v_ids_cptr = script_util.asInt2Ptr()
mesh.getEdgeVertices(edge_id, v_ids_cptr)
ty = (ctypes.c_uint * 2)
v_ids_list = ty.from_address(int(v_ids_cptr))
return (v_ids_l... |
def scale_to_cm(target, max_height_cm=220):
'Heuristically check the target units and scale to cantimeters if other units are detected\n * default value of max_height_cm is for meshes of humans\n '
bb = cmds.polyEvaluate(target, boundingBox=True)
height = (bb[1][1] - bb[1][0])
if (height < (... |
def eulerAngleToRoatationMatrix(theta):
R_x = np.array([[1, 0, 0], [0, math.cos(theta[0]), (- math.sin(theta[0]))], [0, math.sin(theta[0]), math.cos(theta[0])]])
R_y = np.array([[math.cos(theta[1]), 0, math.sin(theta[1])], [0, 1, 0], [(- math.sin(theta[1])), 0, math.cos(theta[1])]])
R_z = np.array([[math.... |
def isRotationMatrix(R):
    """Check whether R is numerically orthonormal (R^T R == I within 1e-6).

    Note: det(R) is not checked, so reflections (det == -1) also pass.
    """
    residual = np.identity(3, dtype=R.dtype) - np.dot(np.transpose(R), R)
    return np.linalg.norm(residual) < 1e-06
|
def rotationMatrixToEulerAngles(R):
assert isRotationMatrix(R)
sy = math.sqrt(((R[(0, 0)] * R[(0, 0)]) + (R[(1, 0)] * R[(1, 0)])))
singular = (sy < 1e-06)
if (not singular):
x = math.atan2(R[(2, 1)], R[(2, 2)])
y = math.atan2((- R[(2, 0)]), sy)
z = math.atan2(R[(1, 0)], R[(0, 0... |
def load_pose_data(data_file):
spin = (True if data_file.endswith('.json') else False)
if spin:
data = json.load(open(data_file, 'r'))
if ('rotmat_tuned' in data):
rotmat = np.array(data['rotmat_tuned'])
else:
rotmat = np.array(data['rotmat'])
poses = []... |
def _Rx(theta):
    """Rotation matrix about the x-axis by theta radians."""
    c, s = m.cos(theta), m.sin(theta)
    return np.matrix([[1, 0, 0], [0, c, -s], [0, s, c]])
|
def _Ry(theta):
    """Rotation matrix about the y-axis by theta radians."""
    c, s = m.cos(theta), m.sin(theta)
    return np.matrix([[c, 0, s], [0, 1, 0], [-s, 0, c]])
|
def _Rz(theta):
    """Rotation matrix about the z-axis by theta radians."""
    c, s = m.cos(theta), m.sin(theta)
    return np.matrix([[c, -s, 0], [s, c, 0], [0, 0, 1]])
|
def euler_xyz_to_R(euler):
    """Convert Euler angles to a rotation matrix.

    Expects input in degrees.
    Only supports the Maya convention of intrinsic xyz Euler angles.
    """
    x_rad = np.deg2rad(euler[0])
    y_rad = np.deg2rad(euler[1])
    z_rad = np.deg2rad(euler[2])
    return _Rz(z_rad) * _Ry(y_rad) * _Rx(x_rad)
|
def R_to_euler(R):
'\n Convert Rotation matrix to Euler-angles in degrees (in Maya convension of intrinsic xyz Euler Angles)\n NOTE: \n Routine produces one of the possible Euler angles, corresponding to input rotations (the Euler angles are not uniquely defined)\n '
tol = (sys.flo... |
def copy2cpu(tensor):
    """Return the input as a host-side numpy array.

    Numpy arrays pass through untouched; torch tensors are detached,
    moved to CPU, and converted.
    """
    already_numpy = isinstance(tensor, np.ndarray)
    return tensor if already_numpy else tensor.detach().cpu().numpy()
|
class PanelClasses():
' Interface to access panel classification by role '
def __init__(self, classes_file):
self.filename = classes_file
with open(classes_file, 'r') as f:
self.classes = json.load(f, object_pairs_hook=OrderedDict)
self.names = list(self.classes.keys())
... |
def flip_img(img):
    """Mirror an rgb image or mask left-to-right.

    Channels come last, e.g. (256, 256, 3).
    """
    return np.fliplr(img)
|
def _dict_to_tensors(dict_obj):
'convert a dictionary with numeric values into a new dictionary with torch tensors'
new_dict = dict.fromkeys(dict_obj.keys())
for (key, value) in dict_obj.items():
if (key == 'image'):
new_dict[key] = value
elif (value is None):
new_d... |
class SampleToTensor(object):
    """Callable transform converting ndarray values of a sample dict to Tensors."""

    def __call__(self, sample):
        # Per-key conversion logic lives in the module-level helper.
        converted = _dict_to_tensors(sample)
        return converted
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.