feat(src): implement data processing for dataset_B and export to CSV
This commit is contained in:
52
code/src/test.py
Normal file
52
code/src/test.py
Normal file
@@ -0,0 +1,52 @@
|
||||
from data_preprocessing import *
from joblib import dump, load

# --- Dataset B: build the file index and export raw data to CSV ---------------
#
# Alternative index builders kept for reference (not used in this run):
#   * generate_damage_files_index(num_damage=6, file_index_start=1, col=5,
#         base_path="D:/thesis/data/dataset_A" or ".../dataset_B",
#         prefix="zzzAD" / "zzzBD")
#   * explicit special-group placement, e.g.
#         [{'df_name': 'zzzBU.TXT', 'position': 0, 'size': 5}]
#   * the dataset_A pipeline mirrors this one with prefix="zzzAD",
#         undamage_file="zzzAU.TXT", base_path=".../dataset_A",
#         export path "D:/thesis/data/converted/raw".

# Collect complement pairs for every damage case 1..30 of dataset B.
# NOTE(review): complement_pairs comes from data_preprocessing via the
# wildcard import — presumably it yields file-name pairings; confirm there.
b_complement = []
for case_number in range(1, 31):
    for pair in complement_pairs(case_number, prefix="zzzBD", extension="TXT"):
        b_complement.append(pair)

# Assemble the full tuple index, anchoring the undamaged reference file.
b = generate_df_tuples(special_groups=b_complement, prefix="zzzBD", undamage_file="zzzBU.TXT")

# Load dataset B (time column included) and dump the raw frames to CSV.
# Vector-column creation steps are intentionally skipped in this run:
#   data_B.create_vector_column(overwrite=True)
#   data_B.create_limited_sensor_vector_column(overwrite=True)
data_B = DataProcessor(file_index=b, base_path="D:/thesis/data/dataset_B", include_time=True)
data_B.export_to_csv("D:/thesis/data/converted/raw_B")

# Debug helpers (disabled):
#   a = load("D:/cache.joblib")
#   breakpoint()
||||
Reference in New Issue
Block a user