@@ -86,12 +86,12 @@ def count_persons_in_json(file_path):
8686
8787def indices_of_first_last_non_nan_chunks (series , min_chunk_size = 10 , chunk_choice_method = 'largest' ):
8888 '''
89- Find indices of the first and last chunks of at least min_chunk_size consecutive non-NaN values.
89+ Find indices of the chunks of at least min_chunk_size consecutive non-NaN values.
9090
9191 INPUT:
9292 - series: pandas Series to trim
93- - min_chunk_size: minimum size of consecutive non-NaN values to consider (default: 5 )
94- - chunk_choice_method: 'largest' to return the largest chunk, 'all' to return all of them ,
93+ - min_chunk_size: minimum size of consecutive non-NaN values to consider (default: 10 )
94+ - chunk_choice_method: 'largest' to return the largest chunk, 'all' to return everything between the first and last non-NaN chunks,
9595 'first' to return only the first one, 'last' to return only the last one
9696
9797 OUTPUT:
@@ -878,7 +878,7 @@ def triangulate_all(config_dict):
878878 # error_tot[0].to_csv(os.path.join(session_dir, 'error_tot.csv'), index=False, sep='\t')
879879
880880 # Trim around good frames and remove persons with too few frames
881- f_range_trimmed = [indices_of_first_last_non_nan_chunks (err ['mean' ], interp_gap_smaller_than ) for err in error_tot ]
881+ f_range_trimmed = [indices_of_first_last_non_nan_chunks (err ['mean' ], min_chunk_size = interp_gap_smaller_than , chunk_choice_method = 'all' ) for err in error_tot ]
882882 # f_range_trimmed = [f_range]*nb_persons_to_detect
883883 deleted_person_id = [n for n , f_range in enumerate (f_range_trimmed ) if len (range (* f_range ))< 4 ]
884884 Q_tot = [Q_tot [n ] for n in range (len (Q_tot )) if n not in deleted_person_id ]
0 commit comments