calib_toml_to_opencap
commit 450079033a (parent 7d92672371)
@@ -10,8 +10,8 @@
Converts AlphaPose single json file to OpenPose frame-by-frame files.

Usage:
python -m AlphaPose_to_OpenPose -i "<input_alphapose_json_file>" -o "<output_openpose_json_folder>"
OR python -m AlphaPose_to_OpenPose -i "<input_alphapose_json_file>"
python -m AlphaPose_to_OpenPose -i input_alphapose_json_file -o output_openpose_json_folder
OR python -m AlphaPose_to_OpenPose -i input_alphapose_json_file
OR from Pose2Sim.Utilities import AlphaPose_to_OpenPose; AlphaPose_to_OpenPose.AlphaPose_to_OpenPose_func(r'input_alphapose_json_file', r'output_openpose_json_folder')
'''

@@ -39,8 +39,8 @@ def AlphaPose_to_OpenPose_func(*args):
Converts AlphaPose single json file to OpenPose frame-by-frame files.

Usage:
python -m AlphaPose_to_OpenPose -i "<input_alphapose_json_file>" -o "<output_openpose_json_folder>"
OR python -m AlphaPose_to_OpenPose -i "<input_alphapose_json_file>"
python -m AlphaPose_to_OpenPose -i input_alphapose_json_file -o output_openpose_json_folder
OR python -m AlphaPose_to_OpenPose -i input_alphapose_json_file
OR from Pose2Sim.Utilities import AlphaPose_to_OpenPose; AlphaPose_to_OpenPose.AlphaPose_to_OpenPose_func(r'input_alphapose_json_file', r'output_openpose_json_folder')
'''
@@ -15,9 +15,9 @@
You may also need to install tables: `pip install tables`

Usage:
python -m Blazepose_runsave -i "<input_file>" --display --save_images --save_video --to_csv --to_h5 --to_json --model_complexity 2 -o "<output_folder>"
OR python -m Blazepose_runsave -i "<input_file>" --display --to_json --save_images
OR python -m Blazepose_runsave -i "<input_file>" -dJs
python -m Blazepose_runsave -i input_file --display --save_images --save_video --to_csv --to_h5 --to_json --model_complexity 2 -o output_folder
OR python -m Blazepose_runsave -i input_file --display --to_json --save_images
OR python -m Blazepose_runsave -i input_file -dJs
OR from Pose2Sim.Utilities import Blazepose_runsave; Blazepose_runsave.blazepose_detec_func(input_file=r'input_file', save_images=True, to_json=True, model_complexity=2)
'''

@@ -133,9 +133,9 @@ def blazepose_detec_func(**args):
You may also need to install tables: `pip install tables`

Usage:
python -m Blazepose_runsave -i "<input_file>" --display --save_images --save_video --to_csv --to_h5 --to_json --model_complexity 2 -o "<output_folder>"
OR python -m Blazepose_runsave -i "<input_file>" --display --to_json --save_images
OR python -m Blazepose_runsave -i "<input_file>" -dJs
python -m Blazepose_runsave -i input_file --display --save_images --save_video --to_csv --to_h5 --to_json --model_complexity 2 -o output_folder
OR python -m Blazepose_runsave -i input_file --display --to_json --save_images
OR python -m Blazepose_runsave -i input_file -dJs
OR from Pose2Sim.Utilities import Blazepose_runsave; Blazepose_runsave.blazepose_detec_func(input_file=r'input_file', save_images=True, to_json=True, model_complexity=2)
'''
@@ -11,8 +11,8 @@
You may need to install tables: 'pip install tables' or 'conda install pytables'

Usage:
python -m DLC_to_OpenPose -i "<input_h5_file>" -o "<output_json_folder>"
OR python -m DLC_to_OpenPose -i "<input_h5_file>"
python -m DLC_to_OpenPose -i input_h5_file -o output_json_folder
OR python -m DLC_to_OpenPose -i input_h5_file
OR from Pose2Sim.Utilities import DLC_to_OpenPose; DLC_to_OpenPose.DLC_to_OpenPose_func(r'input_h5_file', r'output_json_folder')
'''

@@ -43,8 +43,8 @@ def DLC_to_OpenPose_func(*args):
Translates DeepLabCut (h5) 2D pose estimation files into OpenPose (json) files.

Usage:
DLC_to_OpenPose -i "<input_h5_file>" -o "<output_json_folder>"
OR DLC_to_OpenPose -i "<input_h5_file>"
DLC_to_OpenPose -i input_h5_file -o output_json_folder
OR DLC_to_OpenPose -i input_h5_file
OR import DLC_to_OpenPose; DLC_to_OpenPose.DLC_to_OpenPose_func(r'input_h5_file', r'output_json_folder')
'''
@@ -14,8 +14,8 @@

Usage:
from Pose2Sim.Utilities import c3d_to_trc; c3d_to_trc.c3d_to_trc_func(r'<input_c3d_file>')
python -m c3d_to_trc -i "<input_c3d_file>"
python -m c3d_to_trc -i "<input_c3d_file>" -o "<output_c3d_file>"
python -m c3d_to_trc -i input_c3d_file
python -m c3d_to_trc -i input_c3d_file -o output_c3d_file
'''

@@ -45,8 +45,8 @@ def c3d_to_trc_func(*args):

Usage:
import c3d_to_trc; c3d_to_trc.c3d_to_trc_func(r'<input_c3d_file>')
c3d_to_trc -i "<input_c3d_file>"
c3d_to_trc -i "<input_c3d_file>" -o "<output_c3d_file>"
c3d_to_trc -i input_c3d_file
c3d_to_trc -i input_c3d_file -o output_c3d_file
'''

try:
@@ -12,8 +12,8 @@

Usage:
from Pose2Sim.Utilities import calib_qca_to_toml; calib_qca_to_toml.calib_qca_to_toml_func(r'<input_qca_file>')
OR python -m calib_qca_to_toml -i "<input_qca_file>"
OR python -m calib_qca_to_toml -i "<input_qca_file>" --binning_factor 2 -o "<output_toml_file>"
OR python -m calib_qca_to_toml -i input_qca_file
OR python -m calib_qca_to_toml -i input_qca_file --binning_factor 2 -o output_toml_file
'''

@@ -195,8 +195,8 @@ def calib_qca_to_toml_func(*args):

Usage:
import calib_qca_to_toml; calib_qca_to_toml.calib_qca_to_toml_func(r'<input_qca_file>')
OR calib_qca_to_toml -i "<input_qca_file>"
OR calib_qca_to_toml -i "<input_qca_file>" --binning_factor 2 -o "<output_toml_file>"
OR calib_qca_to_toml -i input_qca_file
OR calib_qca_to_toml -i input_qca_file --binning_factor 2 -o output_toml_file
'''

try:
Pose2Sim/Utilities/calib_toml_to_opencap.py (new file, 182 lines)
@@ -0,0 +1,182 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-


'''
##################################################
## TOML CALIBRATION TO OPENCAP CALIBRATION ##
##################################################

Convert an OpenCV .toml calibration file
to OpenCap .pickle calibration files.
One file will be created for each camera.

Usage:
from Pose2Sim.Utilities import calib_toml_to_opencap; calib_toml_to_opencap.calib_toml_to_opencap_func(r'<input_toml_file>')
OR python -m calib_toml_to_opencap -t input_toml_file
OR python -m calib_toml_to_opencap -t input_toml_file -o output_calibration_folder
'''


## INIT
import os
import pickle
import argparse
import numpy as np
import toml
import cv2


## AUTHORSHIP INFORMATION
__author__ = "David Pagnon"
__copyright__ = "Copyright 2021, Pose2Sim"
__credits__ = ["David Pagnon"]
__license__ = "BSD 3-Clause License"
__version__ = '0.4'
__maintainer__ = "David Pagnon"
__email__ = "contact@david-pagnon.com"
__status__ = "Development"


## FUNCTIONS
def RT_qca2cv(r, t):
    '''
    Converts rotation R and translation T
    from the Qualisys object-centered perspective
    to the OpenCV camera-centered perspective,
    and inversely.

    Qc = R*Q + T  -->  Q = R^-1 * Qc - R^-1 * T
    '''

    r = r.T
    t = - r.dot(t)

    return r, t
def rotate_cam(r, t, ang_x=0, ang_y=0, ang_z=0):
    '''
    Apply rotations around x, y, z in camera coordinates.
    Angles in radians.
    '''

    r, t = np.array(r), np.array(t)
    if r.shape == (3,3):
        rt_h = np.block([[r, t.reshape(3,1)], [np.zeros(3), 1]])
    elif r.shape == (3,):
        rt_h = np.block([[cv2.Rodrigues(r)[0], t.reshape(3,1)], [np.zeros(3), 1]])

    r_ax_x = np.array([1,0,0, 0,np.cos(ang_x),-np.sin(ang_x), 0,np.sin(ang_x),np.cos(ang_x)]).reshape(3,3)
    r_ax_y = np.array([np.cos(ang_y),0,np.sin(ang_y), 0,1,0, -np.sin(ang_y),0,np.cos(ang_y)]).reshape(3,3)
    r_ax_z = np.array([np.cos(ang_z),-np.sin(ang_z),0, np.sin(ang_z),np.cos(ang_z),0, 0,0,1]).reshape(3,3)
    r_ax = r_ax_z.dot(r_ax_y).dot(r_ax_x)

    r_ax_h = np.block([[r_ax, np.zeros(3).reshape(3,1)], [np.zeros(3), 1]])
    r_ax_h__rt_h = r_ax_h.dot(rt_h)

    r = r_ax_h__rt_h[:3,:3]
    t = r_ax_h__rt_h[:3,3]

    return r, t


def read_toml(toml_path):
    '''
    Read an OpenCV .toml calibration file.
    Returns 6 lists of size N (N = number of cameras):
    - C (camera names),
    - S (image sizes),
    - D (distortion coefficients),
    - K (intrinsic parameters),
    - R (extrinsic rotations),
    - T (extrinsic translations)
    '''

    calib = toml.load(toml_path)
    C, S, D, K, R, T = [], [], [], [], [], []
    for cam in list(calib.keys()):
        if cam != 'metadata':
            C += [calib[cam]['name']]
            S += [np.array(calib[cam]['size'])]
            D += [np.array(calib[cam]['distortions'])]
            K += [np.array(calib[cam]['matrix'])]
            R += [np.array(calib[cam]['rotation'])]
            T += [np.array(calib[cam]['translation'])]

    return C, S, D, K, R, T
def write_opencap_pickle(output_calibration_folder, C, S, D, K, R, T):
    '''
    Writes OpenCap .pickle calibration files

    Extrinsics in OpenCap are calculated with a vertical board for the world frame.
    As we want the world frame to be horizontal, we need to rotate cameras by -Pi/2 around x in the world frame.
    T is good the way it is.

    INPUTS:
    - Path of the output calibration folder
    - C: list of camera names
    - S: list of image sizes
    - D: list of distortion coefficients
    - K: list of intrinsic parameters
    - R: list of extrinsic rotations
    - T: list of extrinsic translations
    '''

    for i in range(len(C)):
        # Transform rotation for vertical frame of reference (checkerboard vertical with OpenCap)
        R_mat = cv2.Rodrigues(R[i])[0] # transform in matrix
        R_w, T_w = RT_qca2cv(R_mat, T[i]) # transform in world centered perspective
        R_w_90, T_w_90 = rotate_cam(R_w, T_w, ang_x=-np.pi/2, ang_y=0, ang_z=np.pi) # rotate cam wrt world frame
        R_c, T_c = RT_qca2cv(R_w_90, T_w_90) # transform in camera centered perspective

        # retrieve data
        calib_data = {'distortion': np.append(D[i], np.array([0])),
                      'intrinsicMat': K[i],
                      'imageSize': np.expand_dims(S[i][::-1], axis=1),
                      'rotation': R_c,
                      'translation': np.expand_dims(T[i], axis=1)*1000,
                      'rotation_EulerAngles': cv2.Rodrigues(R_c)[0] # OpenCap calls these Euler angles but they are actually the Rodrigues vector (Euler is ambiguous)
                      }

        # write pickle
        with open(os.path.join(output_calibration_folder, f'cam{i:02d}.pickle'), 'wb') as f_out:
            pickle.dump(calib_data, f_out)


def calib_toml_to_opencap_func(*args):
    '''
    Convert an OpenCV .toml calibration file
    to OpenCap .pickle calibration files.
    One file will be created for each camera.

    Usage:
    from Pose2Sim.Utilities import calib_toml_to_opencap; calib_toml_to_opencap.calib_toml_to_opencap_func(r'<input_toml_file>')
    OR python -m calib_toml_to_opencap -t input_toml_file
    OR python -m calib_toml_to_opencap -t input_toml_file -o output_calibration_folder
    '''

    try:
        toml_path = os.path.realpath(args[0].get('toml_file')) # invoked with argparse
        if args[0]['output_calibration_folder'] == None:
            output_calibration_folder = os.path.dirname(toml_path)
        else:
            output_calibration_folder = os.path.realpath(args[0]['output_calibration_folder'])
    except:
        toml_path = os.path.realpath(args[0]) # invoked as a function
        output_calibration_folder = os.path.dirname(toml_path)

    C, S, D, K, R, T = read_toml(toml_path)
    write_opencap_pickle(output_calibration_folder, C, S, D, K, R, T)

    print(f'OpenCap calibration files generated at {output_calibration_folder}.\n')


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('-t', '--toml_file', required = True, help='Input OpenCV .toml calibration file')
    parser.add_argument('-o', '--output_calibration_folder', required = False, help='OpenCap calibration folder')
    args = vars(parser.parse_args())

    calib_toml_to_opencap_func(args)
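For reference, here is a minimal sketch of how one of the generated .pickle files could be loaded and sanity-checked. The dictionary keys come from write_opencap_pickle above; the cam00.pickle path and the specific checks are illustrative assumptions, not part of the commit:

```
# Minimal sketch: load one generated OpenCap calibration file and sanity-check it.
# 'cam00.pickle' is a hypothetical output path; key names come from write_opencap_pickle.
import pickle
import numpy as np
import cv2

with open('cam00.pickle', 'rb') as f:
    calib = pickle.load(f)

R = calib['rotation']  # 3x3 extrinsic rotation written by write_opencap_pickle

# A proper rotation matrix is orthonormal with determinant +1
assert np.allclose(R @ R.T, np.eye(3), atol=1e-6)
assert np.isclose(np.linalg.det(R), 1.0, atol=1e-6)

# 'rotation_EulerAngles' actually stores the Rodrigues vector (see the comment in the code above)
assert np.allclose(cv2.Rodrigues(R)[0], calib['rotation_EulerAngles'])

print('camera matrix:\n', calib['intrinsicMat'])     # 3x3 intrinsics
print('image size:', calib['imageSize'].ravel())
print('distortion:', calib['distortion'])
print('translation:', calib['translation'].ravel())  # .toml value scaled by 1000
```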
@@ -12,8 +12,8 @@

Usage:
from Pose2Sim.Utilities import calib_toml_to_qca; calib_toml_to_qca.calib_toml_to_qca_func(r'<input_toml_file>')
OR python -m calib_toml_to_qca -i "<input_toml_file>"
OR python -m calib_toml_to_qca -i "<input_toml_file>" --binning_factor 2 --pixel_size 5.54e-3 -o "<output_qca_file>"
OR python -m calib_toml_to_qca -i input_toml_file
OR python -m calib_toml_to_qca -i input_toml_file --binning_factor 2 --pixel_size 5.54e-3 -o output_qca_file
'''

@@ -152,8 +152,8 @@ def calib_toml_to_qca_func(**args):

Usage:
import calib_toml_to_qca; calib_toml_to_qca.calib_toml_to_qca_func(input_file=r'<input_toml_file>')
OR calib_toml_to_qca -i "<input_toml_file>"
OR calib_toml_to_qca -i "<input_toml_file>" --binning_factor 2 --pixel_size 5.54e-3 -o "<output_qca_file>"
OR calib_toml_to_qca -i input_toml_file
OR calib_toml_to_qca -i input_toml_file --binning_factor 2 --pixel_size 5.54e-3 -o output_qca_file
'''

toml_path = args.get('input_file')
@@ -12,8 +12,8 @@

Usage:
from Pose2Sim.Utilities import calib_toml_to_yml; calib_toml_to_yml.calib_toml_to_yml_func(r'<input_toml_file>')
OR python -m calib_yml_to_toml -t "<input_toml_file>"
OR python -m calib_yml_to_toml -t "<input_toml_file>" -i "<intrinsic_yml_file>" -e "<extrinsic_yml_file>"
OR python -m calib_yml_to_toml -t input_toml_file
OR python -m calib_yml_to_toml -t input_toml_file -i intrinsic_yml_file -e extrinsic_yml_file
'''

## INIT

@@ -116,8 +116,8 @@ def calib_toml_to_yml_func(*args):

Usage:
import calib_toml_to_yml; calib_toml_to_yml.calib_toml_to_yml_func(r'<input_toml_file>')
OR python -m calib_toml_to_yml -t "<input_toml_file>"
OR python -m calib_toml_to_yml -t "<input_toml_file>" -i "<intrinsic_yml_file>" -e "<extrinsic_yml_file>"
OR python -m calib_toml_to_yml -t input_toml_file
OR python -m calib_toml_to_yml -t input_toml_file -i intrinsic_yml_file -e extrinsic_yml_file
'''

try:

@@ -142,7 +142,7 @@ def calib_toml_to_yml_func(*args):

if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-t', '--toml_file', required = True, help='OpenCV intrinsic .yml calibration file')
parser.add_argument('-t', '--toml_file', required = True, help='Input OpenCV .toml calibration file')
parser.add_argument('-i', '--intrinsic_yml_file', required = False, help='OpenCV intrinsic .yml calibration file')
parser.add_argument('-e', '--extrinsic_yml_file', required = False, help='OpenCV extrinsic .yml calibration file')
args = vars(parser.parse_args())
@@ -15,8 +15,8 @@

Usage:
import calib_yml_to_toml; calib_yml_to_toml.calib_yml_to_toml_func(r'<intrinsic_yml_file>', r'<extrinsic_yml_file>')
OR python -m calib_yml_to_toml -i <intrinsic_yml_file> -e <extrinsic_yml_file>
OR python -m calib_yml_to_toml -i <intrinsic_yml_file> -e <extrinsic_yml_file> -o "<output_toml_file>"
OR python -m calib_yml_to_toml -i intrinsic_yml_file -e extrinsic_yml_file
OR python -m calib_yml_to_toml -i intrinsic_yml_file -e extrinsic_yml_file -o output_toml_file
'''

@@ -123,8 +123,8 @@ def calib_yml_to_toml_func(*args):

Usage:
import calib_yml_to_toml; calib_yml_to_toml.calib_yml_to_toml_func(r'<intrinsic_yml_file>', r'<extrinsic_yml_file>')
OR python -m calib_yml_to_toml -i <intrinsic_yml_file> -e <extrinsic_yml_file>
OR python -m calib_yml_to_toml -i <intrinsic_yml_file> -e <extrinsic_yml_file> -o "<output_toml_file>"
OR python -m calib_yml_to_toml -i intrinsic_yml_file -e extrinsic_yml_file
OR python -m calib_yml_to_toml -i intrinsic_yml_file -e extrinsic_yml_file -o output_toml_file
'''
try:
intrinsic_path = os.path.realpath(args[0].get('intrinsic_file')) # invoked with argparse
@@ -17,8 +17,8 @@
images.

Usage:
python -m json_display_with_img -j "<json_folder>" -i "<raw_img_folder>"
python -m json_display_with_img -j "<json_folder>" -i "<raw_img_folder>" -o "<output_img_folder>" -d True -s True
python -m json_display_with_img -j json_folder -i raw_img_folder
python -m json_display_with_img -j json_folder -i raw_img_folder -o output_img_folder -d True -s True
from Pose2Sim.Utilities import json_display_with_img; json_display_with_img.json_display_with_img_func(json_folder=r'<json_folder>', raw_img_folder=r'<raw_img_folder>')
'''

@@ -54,8 +54,8 @@ def json_display_with_img_func(**args):
images.

Usage:
json_display_with_img -j "<json_folder>" -i "<raw_img_folder>"
json_display_with_img -j "<json_folder>" -i "<raw_img_folder>" -o "<output_img_folder>" -d True -s True
json_display_with_img -j json_folder -i raw_img_folder
json_display_with_img -j json_folder -i raw_img_folder -o output_img_folder -d True -s True
import json_display_with_img; json_display_with_img.json_display_with_img_func(json_folder=r'<json_folder>', raw_img_folder=r'<raw_img_folder>')
'''
@@ -14,8 +14,8 @@
coordinates on the original images.

Usage:
python -m json_display_without_img -j "<json_folder>"
python -m json_display_without_img -j "<json_folder>" -o "<output_img_folder>" -d True -s True
python -m json_display_without_img -j json_folder
python -m json_display_without_img -j json_folder -o output_img_folder -d True -s True
from Pose2Sim.Utilities import json_display_without_img; json_display_without_img.json_display_without_img_func(json_folder=r'<json_folder>')
'''

@@ -80,8 +80,8 @@ def json_display_without_img_func(**args):
coordinates on the original images.

Usage:
json_display_without_img -j "<json_folder>"
json_display_without_img -j "<json_folder>" -o "<output_img_folder>" -d True -s True
json_display_without_img -j json_folder
json_display_without_img -j json_folder -o output_img_folder -d True -s True
import json_display_without_img; json_display_without_img.json_display_without_img_func(json_folder=r'<json_folder>')
'''
@@ -11,8 +11,8 @@

Usage:
from Pose2Sim.Utilities import trc_Zup_to_Yup; trc_Zup_to_Yup.trc_Zup_to_Yup_func(r'<input_trc_file>', r'<output_trc_file>')
python -m trc_Zup_to_Yup -i "<input_trc_file>"
python -m trc_Zup_to_Yup -i "<input_trc_file>" -o "<output_trc_file>"
python -m trc_Zup_to_Yup -i input_trc_file
python -m trc_Zup_to_Yup -i input_trc_file -o output_trc_file
'''

@@ -40,8 +40,8 @@ def trc_Zup_to_Yup_func(*args):

Usage:
import trc_Zup_to_Yup; trc_Zup_to_Yup.trc_Zup_to_Yup_func(r'<input_trc_file>', r'<output_trc_file>')
trcZup_to_Yup -i "<input_trc_file>"
trcZup_to_Yup -i "<input_trc_file>" -o "<output_trc_file>"
trcZup_to_Yup -i input_trc_file
trcZup_to_Yup -i input_trc_file -o output_trc_file
'''

try:
@@ -14,8 +14,8 @@

Usage:
from Pose2Sim.Utilities import trc_combine; trc_combine.trc_combine_func(r'<first_path>', r'<second_path>', r'<output_path>')
OR python -m trc_combine -i "<first_path>" -j "<second_path>" -o "<output_path>"
OR python -m trc_combine -i "<first_path>" -j "<second_path>"
OR python -m trc_combine -i first_path -j second_path -o output_path
OR python -m trc_combine -i first_path -j second_path
'''

@@ -133,8 +133,8 @@ def trc_combine_func(*args):

Usage:
from Pose2Sim.Utilities import trc_combine; trc_combine.trc_combine_func(r'<first_path>', r'<second_path>', r'<output_path>')
OR python -m trc_combine -i "<first_path>" -j "<second_path>" -o "<output_path>"
OR python -m trc_combine -i "<first_path>" -j "<second_path>"
OR python -m trc_combine -i first_path -j second_path -o output_path
OR python -m trc_combine -i first_path -j second_path
'''

try:
@@ -10,8 +10,8 @@
Undersample a trc file

Usage:
python -m trc_desample -i "<input_trc_file>" -f <output_frequency>
python -m trc_desample -i "<input_trc_file>" -f <output_frequency> -o "<output_trc_file>"
python -m trc_desample -i input_trc_file -f <output_frequency>
python -m trc_desample -i input_trc_file -f <output_frequency> -o output_trc_file
from Pose2Sim.Utilities import trc_desample; trc_desample.trc_desample_func(r'input_trc_file', output_frequency, r'output_trc_file')
'''

@@ -39,8 +39,8 @@ def trc_desample_func(*args):
Undersample a trc file

Usage:
trc_desample -i "<input_trc_file>" -f <output_frequency>
trc_desample -i "<input_trc_file>" -f <output_frequency> -o "<output_trc_file>"
trc_desample -i input_trc_file -f <output_frequency>
trc_desample -i input_trc_file -f <output_frequency> -o output_trc_file
import trc_desample; trc_desample.trc_desample_func(r'input_trc_file', output_frequency, r'output_trc_file')
'''
@@ -12,18 +12,18 @@

Usage examples:
Butterworth filter, low-pass, 4th order, cut off frequency 6 Hz:
from Pose2Sim.Utilities import trc_filter; trc_filter.trc_filter_func(input_file = r"<input_trc_file>", output_file = r"<output_trc_file>",
from Pose2Sim.Utilities import trc_filter; trc_filter.trc_filter_func(input_file = input_trc_file, output_file = output_trc_file,
display=True, type='butterworth', pass_type = 'low', order=4, cut_off_frequency=6)
OR python -m trc_filter -i "<input_trc_file>" -o "<output_trc_file>" -d True -t butterworth -p low -n 4 -f 6
OR python -m trc_filter -i "<input_trc_file>" -t butterworth -p low -n 4 -f 6
OR python -m trc_filter -i input_trc_file -o output_trc_file -d True -t butterworth -p low -n 4 -f 6
OR python -m trc_filter -i input_trc_file -t butterworth -p low -n 4 -f 6
Butterworth filter on speed, low-pass, 4th order, cut off frequency 6 Hz:
python -m trc_filter -i "<input_trc_file>" -t butterworth_on_speed -p low -n 4 -f 6
python -m trc_filter -i input_trc_file -t butterworth_on_speed -p low -n 4 -f 6
Gaussian filter, kernel 5:
python -m trc_filter -i "<input_trc_file>" -t gaussian, -k 5
python -m trc_filter -i input_trc_file -t gaussian, -k 5
LOESS filter, kernel 5: NB: frac = kernel * frames_number
python -m trc_filter -i "<input_trc_file>" -t loess, -k 5
python -m trc_filter -i input_trc_file -t loess, -k 5
Median filter, kernel 5:
python -m trc_filter -i "<input_trc_file>" -t gaussian, -k 5
python -m trc_filter -i input_trc_file -t gaussian, -k 5
'''

@@ -298,18 +298,18 @@ def trc_filter_func(**args):

Usage examples:
Butterworth filter, low-pass, 4th order, cut off frequency 6 Hz:
import trc_filter; trc_filter.trc_filter_func(input_file = r"<input_trc_file>", output_file = r"<output_trc_file>",
import trc_filter; trc_filter.trc_filter_func(input_file = input_trc_file, output_file = output_trc_file,
display=True, type='butterworth', pass_type = 'low', order=4, cut_off_frequency=6)
OR python -m trc_filter -i "<input_trc_file>" -o "<output_trc_file>" -d True -t butterworth -p low -n 4 -f 6
OR python -m trc_filter -i "<input_trc_file>" -t butterworth, -p low -n 4 -f 6
OR python -m trc_filter -i input_trc_file -o output_trc_file -d True -t butterworth -p low -n 4 -f 6
OR python -m trc_filter -i input_trc_file -t butterworth, -p low -n 4 -f 6
Butterworth filter on speed, low-pass, 4th order, cut off frequency 6 Hz:
python -m trc_filter -i "<input_trc_file>" -t butterworth_on_speed, -p low -n 4 -f 6
python -m trc_filter -i input_trc_file -t butterworth_on_speed, -p low -n 4 -f 6
Gaussian filter, kernel 5:
python -m trc_filter -i "<input_trc_file>" -t gaussian, -k 5
python -m trc_filter -i input_trc_file -t gaussian, -k 5
LOESS filter, kernel 5: NB: frac = kernel * frames_number
python -m trc_filter -i "<input_trc_file>" -t loess, -k 5
python -m trc_filter -i input_trc_file -t loess, -k 5
Median filter, kernel 5:
python -m trc_filter -i "<input_trc_file>" -t gaussian, -k 5
python -m trc_filter -i input_trc_file -t gaussian, -k 5
'''

# Read trc header
@@ -12,8 +12,8 @@

Usage:
from Pose2Sim.Utilities import trc_from_mot_osim; trc_from_mot_osim.trc_from_mot_osim_func(r'<input_mot_file>', r'<output_osim_file>', r'<output_trc_file>')
python -m trc_from_mot_osim -m "<input_mot_file>" -o "<input_osim_file>"
python -m trc_from_mot_osim -m "<input_mot_file>" -o "<input_osim_file>" -t "<output_trc_file>"
python -m trc_from_mot_osim -m input_mot_file -o input_osim_file
python -m trc_from_mot_osim -m input_mot_file -o input_osim_file -t output_trc_file
'''

@@ -86,8 +86,8 @@ def trc_from_mot_osim_func(*args):

Usage:
from Pose2Sim.Utilities import trc_from_mot_osim; trc_from_mot_osim.trc_from_mot_osim_func(r'<input_mot_file>', r'<output_osim_file>', r'<trc_output_file>')
python -m trc_from_mot_osim -m "<input_mot_file>" -o "<input_osim_file>"
python -m trc_from_mot_osim -m "<input_mot_file>" -o "<input_osim_file>" -t "<trc_output_file>"
python -m trc_from_mot_osim -m input_mot_file -o input_osim_file
python -m trc_from_mot_osim -m input_mot_file -o input_osim_file -t trc_output_file
'''

try:
@@ -24,8 +24,8 @@
eg -d=-Z or --gait_direction=-Z

from Pose2Sim.Utilities import trc_gaitevents; trc_gaitevents.trc_gaitevents_func(r'<input_trc_file>', '<gait_direction>')
OR python -m trc_gaitevents -i "<input_trc_file>"
OR python -m trc_gaitevents -i "<input_trc_file>" --gait_direction=-Z
OR python -m trc_gaitevents -i input_trc_file
OR python -m trc_gaitevents -i input_trc_file --gait_direction=-Z
'''

@@ -142,8 +142,8 @@ def trc_gaitevents_func(*args):
eg -d=-Z or --gait_direction=-Z

import trc_gaitevents; trc_gaitevents.trc_gaitevents_func(r'<input_trc_file>', '<gait_direction>')
OR trc_gaitevents -i "<input_trc_file>" --gait_direction Z
OR trc_gaitevents -i "<input_trc_file>" --gait_direction=-Z
OR trc_gaitevents -i input_trc_file --gait_direction Z
OR trc_gaitevents -i input_trc_file --gait_direction=-Z
'''

try:
@@ -11,7 +11,7 @@

Usage:
from Pose2Sim.Utilities import trc_plot; trc_plot.trc_plot_func(r'<input_trc_file>')
OR python -m trc_plot -i "<input_trc_file>"
OR python -m trc_plot -i input_trc_file
'''

@@ -139,7 +139,7 @@ def trc_plot_func(*args):

Usage:
import trc_plot; trc_plot.trc_plot_func(r'<input_trc_file>')
OR trc_plot -i "<input_trc_file>"
OR trc_plot -i input_trc_file
'''

try:
@@ -182,7 +182,7 @@ Make sure you modify the [User\Config.toml](https://github.com/perfanalytics/pos
However, it is less robust and accurate than OpenPose, and can only detect a single person.
* Use the script `Blazepose_runsave.py` (see [Utilities](#utilities)) to run BlazePose under Python, and store the detected coordinates in OpenPose (json) or DeepLabCut (h5 or csv) format:
```
python -m Blazepose_runsave -i r"<input_file>" -dJs
python -m Blazepose_runsave -i rinput_file -dJs
```
Type in `python -m Blazepose_runsave -h` for explanation on parameters and for additional ones.
* Make sure you change the `pose_model` and the `tracked_keypoint` in the [User\Config.toml](https://github.com/perfanalytics/pose2sim/blob/main/Pose2Sim/Empty_project/User/Config.toml) file.

@@ -192,7 +192,7 @@ If you need to detect specific points on a human being, an animal, or an object,
1. Train your DeepLabCut model and run it on your images or videos (more instruction on their repository)
2. Translate the h5 2D coordinates to json files (with `DLC_to_OpenPose.py` script, see [Utilities](#utilities)):
```
python -m DLC_to_OpenPose -i r"<input_h5_file>"
python -m DLC_to_OpenPose -i rinput_h5_file
```
3. Report the model keypoints in the [skeleton.py](https://github.com/perfanalytics/pose2sim/blob/main/Pose2Sim/skeletons.py) file, and make sure you change the `pose_model` and the `tracked_keypoint` in the [User\Config.toml](https://github.com/perfanalytics/pose2sim/blob/main/Pose2Sim/Empty_project/User/Config.toml) file.
4. Create an OpenSim model if you need 3D joint angles.

@@ -202,7 +202,7 @@ If you need to detect specific points on a human being, an animal, or an object,
* Install and run AlphaPose on your videos (more instruction on their repository)
* Translate the AlphaPose single json file to OpenPose frame-by-frame files (with `AlphaPose_to_OpenPose.py` script, see [Utilities](#utilities)):
```
python -m AlphaPose_to_OpenPose -i r"<input_alphapose_json_file>"
python -m AlphaPose_to_OpenPose -i input_alphapose_json_file
```
* Make sure you change the `pose_model` and the `tracked_keypoint` in the [User\Config.toml](https://github.com/perfanalytics/pose2sim/blob/main/Pose2Sim/Empty_project/User/Config.toml) file.