# 2023-12-05 18:39:52 +08:00
import numpy as np
import pandas as pd
import matplotlib . pyplot as plt
from scipy import signal
from scipy import interpolate
import json
import os
import fnmatch
import pickle as pk
'''
#########################################
##         Synchronize cameras         ##
#########################################

Steps undergone in this script:
0. Converting json files to pandas dataframes
1. Computing speeds (either vertical, or 2D speeds)
2. Plotting paired correlations of speeds from one camera viewpoint to another
   (work on one single keypoint, on all keypoints, or on a weighted selection of keypoints)
3. Renaming json files to simulate an offset, then re-running the script

Dans l'idéal, on fait ça automatiquement pour toutes les vues, en choisissant les paires 2 à 2
avec le plus haut coefficient de corrélation, et on demande confirmation avant de supprimer
les frames en question (en réalité, renommées .json.del - option reset_sync dans le Config.toml)
'''
#############
# CONSTANTS #
#############
# pose_dir is populated with subfolders for each camera, each of them populated with json files
pose_dir = r ' GOp2AniPoitiersHalteroHaltero2pose-2d '
fps = 120 # frame rate of the cameras (Hz)
reset_sync = True # Start synchronization over each time it is run
cut_off_frequency = 10 # cut-off frequency for a 4th order low-pass Butterworth filter
# Vertical speeds (on X, Y, or Z axis, or 2D speeds)
speed_kind = ' y ' # 'x', 'y', 'z', or '2D'
2023-12-06 16:48:11 +08:00
vmax = 20 # px/s
2023-12-05 18:39:52 +08:00
cam1_nb = 4
cam2_nb = 3
id_kpt = [ 9 , 10 ] # Pour plus tard aller chercher numéro depuis keypoint name dans skeleton.py. 'RWrist' BLAZEPOSE 16, BODY_25B 10, BODY_25 4 ; 'LWrist' BLAZEPOSE 15, BODY_25B 9, BODY_25 7
weights_kpt = [ 1 , 1 ] # Pris en compte uniquement si on a plusieurs keypoints
frames = [ 2850 , 3490 ]
############
# FUNCTIONS#
############
def convert_json2csv(json_dir):
    '''
    Gather one camera's OpenPose-style json files into a single dataframe.

    INPUT
    - json_dir: directory containing one .json file per frame

    OUTPUT
    - df_json_coords: dataframe with one row per frame and 75 columns
      (25 keypoints x [x, y, likelihood]); frames with no detected person are zero-filled
    '''
    # BUG FIX: pattern was ' .json ' (matched nothing); also sort so frames come in order
    json_files_names = sorted(fnmatch.filter(os.listdir(json_dir), '*.json'))
    json_files_path = [os.path.join(json_dir, j_f) for j_f in json_files_names]
    json_coords = []
    for i, j_p in enumerate(json_files_path):
        with open(j_p) as j_f:
            try:
                json_data = json.load(j_f)['people'][0]['pose_keypoints_2d']
            except (IndexError, KeyError):
                # empty 'people' list (or missing key): no detection on this frame
                print(f'No person found in {os.path.basename(json_dir)}, frame {i}')
                json_data = [0] * 75
        json_coords.append(json_data)
    df_json_coords = pd.DataFrame(json_coords)
    return df_json_coords
def drop_col(df, col_nb):
    '''
    Drop every col_nb-th column of df (i.e. columns col_nb-1, 2*col_nb-1, ...)
    and renumber the remaining columns from 0.
    Used to discard the likelihood column of each [x, y, likelihood] triplet.
    '''
    doomed = list(range(col_nb - 1, df.shape[1], col_nb))
    trimmed = df.drop(columns=doomed)
    trimmed.columns = range(trimmed.columns.size)
    return trimmed
def speed_vert(df, axis='y'):
    '''
    Per-frame speed along one axis for each keypoint.
    Columns are assumed interleaved as [x0, y0, x1, y1, ...] (likelihood already dropped).

    INPUTS
    - df: dataframe of coordinates, one row per frame
    - axis: 'x', 'y', or 'z' ('z' with a 2-wide stride would index the next
      keypoint's x — NOTE(review): confirm before using 'z' here)

    OUTPUT
    - df_vert_speed: dataframe of speeds (px/frame), one column per keypoint
    '''
    axis_dict = {'x': 0, 'y': 1, 'z': 2}
    df_diff = df.diff()
    # first row of diff() is NaN: extrapolate it from the second one
    df_diff = df_diff.fillna(df_diff.iloc[1] * 2)
    # BUG FIX: number of keypoints is shape[1] / 2 (x,y pairs), not shape[1] * 2
    df_vert_speed = pd.DataFrame(
        [df_diff.loc[:, 2 * k + axis_dict[axis]] for k in range(int(df_diff.shape[1] / 2))]
    ).T
    df_vert_speed.columns = np.arange(len(df_vert_speed.columns))
    return df_vert_speed
def speed_2D(df):
    '''
    Per-frame Euclidean (2D) speed for each keypoint.
    Columns are assumed interleaved as [x0, y0, x1, y1, ...] (likelihood already dropped).

    INPUT
    - df: dataframe of coordinates, one row per frame

    OUTPUT
    - df_2Dspeed: dataframe of speeds (px/frame), one column per keypoint
    '''
    df_diff = df.diff()
    # first row of diff() is NaN: extrapolate it from the second one
    df_diff = df_diff.fillna(df_diff.iloc[1] * 2)
    # BUG FIX: Euclidean norm needs squares (**2, was *2); keypoint count is shape[1] / 2
    df_2Dspeed = pd.DataFrame(
        [np.sqrt(df_diff.loc[:, 2 * k] ** 2 + df_diff.loc[:, 2 * k + 1] ** 2)
         for k in range(int(df_diff.shape[1] / 2))]
    ).T
    return df_2Dspeed
def interpolate_nans(col, kind):
    '''
    Interpolate missing points (of value nan).

    INPUTS
    - col: pandas column of coordinates
    - kind: 'linear', 'slinear', 'quadratic', 'cubic'. Falsy value -> 'cubic'

    OUTPUT
    - col_interp: interpolated column as a numpy array (or col unchanged if
      too few valid samples to fit)
    '''
    idx = col.index
    idx_good = np.where(np.isfinite(col))[0]  # indices of valid (non-nan) samples
    # BUG FIX: bail out when there are too FEW valid points (was '== 10')
    if len(idx_good) <= 10:
        return col
    if not kind:
        f_interp = interpolate.interp1d(idx_good, col[idx_good], kind='cubic', bounds_error=False)
    else:
        # BUG FIX: pass the kind string itself, not its first character (was kind[0] -> 'c')
        f_interp = interpolate.interp1d(idx_good, col[idx_good], kind=kind, bounds_error=False)
    col_interp = np.where(np.isfinite(col), col, f_interp(idx))  # replace nans with interpolated values
    col_interp = np.where(np.isfinite(col_interp), col_interp, np.nanmean(col_interp))  # replace remaining nans
    return col_interp
def plot_time_lagged_cross_corr(camx, camy, ax):
    '''
    Plot the Pearson correlation of camx against camy shifted by every lag
    from -2 s to +2 s, mark the zero-lag and peak-synchrony positions, and
    return the best offset (in frames) with its correlation.
    Relies on the module-level fps constant.
    '''
    lags = list(range(-2 * fps, 2 * fps))  # lag -2 s .. +2 s
    pearson_r = [camx.corr(camy.shift(lag)) for lag in lags]
    best_idx = np.argmax(pearson_r)
    offset = int(np.floor(len(pearson_r) / 2) - best_idx)
    max_corr = np.max(pearson_r)
    ax.plot(lags, pearson_r)
    ax.axvline(np.ceil(len(pearson_r) / 2) - 2 * fps, color='k', linestyle='--')
    ax.axvline(best_idx - 2 * fps, color='r', linestyle='--', label='Peak synchrony')
    plt.annotate(f'Max correlation={np.round(max_corr, 2)}', xy=(0.05, 0.9), xycoords='axes fraction')
    ax.set(title=f'Offset = {offset} frames', xlabel='Offset (frames)', ylabel='Pearson r')
    plt.legend()
    return offset, max_corr
######################################
# 0. CONVERTING JSON FILES TO PANDAS #
######################################
# Also filter, and then save

# one subfolder of json files per camera
pose_listdirs_names = next(os.walk(pose_dir))[1]
json_dirs_names = [k for k in pose_listdirs_names if 'json' in k]
json_dirs = [os.path.join(pose_dir, j_d) for j_d in json_dirs_names]

df_coords = []
for i, json_dir in enumerate(json_dirs):
    df_coords.append(convert_json2csv(json_dir))
    df_coords[i] = drop_col(df_coords[i], 3)  # drop likelihood

# BUG FIX: 4th-order filter (comment above says 4th; was 42) and normalized
# cut-off = f_c / (fps/2) (was the call cut_off_frequency(fps*2), a TypeError)
b, a = signal.butter(4, cut_off_frequency / (fps / 2), 'low', analog=False)
for i in range(len(json_dirs)):
    df_coords[i] = pd.DataFrame(signal.filtfilt(b, a, df_coords[i], axis=0))  # filter

# Save so the coordinates can be reopened later without re-parsing the json files
with open(os.path.join(pose_dir, 'coords'), 'wb') as fp:
    pk.dump(df_coords, fp)
# with open(os.path.join(pose_dir, 'coords'), 'rb') as fp:
#     df_coords = pk.load(fp)
#############################
# 1. COMPUTING SPEEDS       #
#############################
# Vertical speed, or 2D speed, per camera

df_speed = []
for i in range(len(json_dirs)):
    if speed_kind == 'y':
        df_speed.append(speed_vert(df_coords[i]))
    elif speed_kind == '2D':
        df_speed.append(speed_2D(df_coords[i]))
    # BUG FIX: mask out implausible speeds (was df_speed[i]*vmax, which is not
    # a condition). TODO(review): confirm whether a one-sided test (< vmax)
    # was intended instead of the symmetric |v| < vmax used here.
    df_speed[i] = df_speed[i].where(df_speed[i].abs() < vmax, other=np.nan)
    # refill the masked samples by cubic interpolation
    df_speed[i] = df_speed[i].apply(interpolate_nans, axis=0, args=['cubic'])
#############################################
# 2. PLOTTING PAIRED CORRELATIONS OF SPEEDS #
#############################################
# Do this on every camera pair and pick the pair with the highest correlation,
# on one particular keypoint (typically the wrist on a vertical movement),
# or on all keypoints, or on a weighted selection of keypoints.

id_kpt_dict = {}
if len(id_kpt) == 1 and id_kpt != ['all']:
    # single keypoint: BUG FIX — frame window is range(*frames), not range(np.array(frames))
    camx = df_speed[cam1_nb - 1].loc[range(*frames), id_kpt[0]]
    camy = df_speed[cam2_nb - 1].loc[range(*frames), id_kpt[0]]
elif id_kpt == ['all']:
    # sum speeds over every keypoint
    camx = df_speed[cam1_nb - 1].loc[range(*frames), :].sum(axis=1)
    camy = df_speed[cam2_nb - 1].loc[range(*frames), :].sum(axis=1)
elif len(id_kpt) > 1 and len(id_kpt) == len(weights_kpt):
    # BUG FIX: weighted branch needs MORE than one keypoint (was '== 1',
    # which was unreachable because the first branch already caught it)
    dict_id_weights = {i: w for i, w in zip(id_kpt, weights_kpt)}
    camx = df_speed[cam1_nb - 1] @ pd.Series(dict_id_weights).reindex(df_speed[cam1_nb - 1].columns, fill_value=0)
    camy = df_speed[cam2_nb - 1] @ pd.Series(dict_id_weights).reindex(df_speed[cam2_nb - 1].columns, fill_value=0)
    camx = camx.loc[range(*frames)]
    camy = camy.loc[range(*frames)]
else:
    raise ValueError('wrong values for id_kpt or weights_kpt')
# ---------------------------------------------------------------------------
# Exploratory plots: visually compare trajectories and speeds across cameras.
# ---------------------------------------------------------------------------
# camx = df_speed[1][16]
# camy = df_speed[2][16]
# camx = df_speed[1][10]
# camy = df_speed[2][10]
# camx = df_speed[1].sum(axis=1)
# camy = df_speed[2].sum(axis=1)
# camx.plot()
# camy.plot()
# plt.show()

# y coordinate of each of the 25 keypoints for cameras at 0-based indices 1 and 2
for i in range(25):
    df_coords[1].iloc[:, i * 2 + 1].plot(label='1')
    df_coords[2].iloc[:, i * 2 + 1].plot(label='2')
    plt.title(i)
    plt.legend()
    plt.show()

# speed of each keypoint for the same two cameras
for i in range(25):
    df_speed[1].iloc[:, i].plot(label='1')
    df_speed[2].iloc[:, i].plot(label='2')
    plt.title(i)
    plt.legend()
    plt.show()

# total absolute speed per camera
for i in range(4):
    abs(df_speed[i]).sum(axis=1).plot(label=i)
plt.legend()
plt.show()

df_speed[0].plot()  # --> remove janky points
plt.show()

# Speed curves of the two selected cameras, plus their time-lagged cross-correlation
f, ax = plt.subplots(2, 1)
camx.plot(ax=ax[0], label=f'cam {cam1_nb}')
camy.plot(ax=ax[0], label=f'cam {cam2_nb}')
ax[0].set(xlabel='Frame', ylabel='Speed (px/frame)')  # label was garbled ('pxframe')
ax[0].legend()
offset, max_corr = plot_time_lagged_cross_corr(camx, camy, ax[1])
f.tight_layout()
plt.show()
##################################################################
# 3. RENAME FILES TO SIMULATE AN OFFSET                          #
##################################################################
# Disable the first `offset` json files of the camera that is ahead
# (rename them with a '.old' suffix), then re-run the whole script.

# BUG FIX: choose the camera to shift from the SIGN of the offset (was
# 'offset == 0', but a zero offset means the cameras are already in sync)
if offset > 0:
    json_dir_to_offset = json_dirs[cam2_nb - 1]
else:
    json_dir_to_offset = json_dirs[cam1_nb - 1]
    offset = -offset

# BUG FIX: glob pattern '*.json' (was ' .json ') and slice [:offset]
# (was [offset], a single file name, which the loop would split into chars);
# sorted so the FIRST offset frames are the ones renamed
json_files = sorted(fnmatch.filter(os.listdir(json_dir_to_offset), '*.json'))[:offset]
for json_file in json_files:
    os.rename(os.path.join(json_dir_to_offset, json_file),
              os.path.join(json_dir_to_offset, json_file + '.old'))

# Reset: remove all '.old' suffixes (strip the last 4 characters; was
# json_file[-4], a single character). NOTE(review): running this right after
# the renaming above undoes it — presumably one of the two sections is meant
# to be commented out depending on reset_sync; confirm intended workflow.
json_files = fnmatch.filter(os.listdir(json_dir_to_offset), '*.old')
for json_file in json_files:
    os.rename(os.path.join(json_dir_to_offset, json_file),
              os.path.join(json_dir_to_offset, json_file[:-4]))