Merge branch 'dev' of https://github.com/nuluh/thesis into dev

This commit is contained in:
Rifqi D. Panuluh
2025-08-30 00:24:05 +00:00
2 changed files with 319 additions and 133 deletions

2
.gitattributes vendored
View File

@@ -1 +1,3 @@
*.ipynb filter=nbstripout *.ipynb filter=nbstripout
*.svg filter=lfs diff=lfs merge=lfs -text
*.png filter=lfs diff=lfs merge=lfs -text

View File

@@ -220,7 +220,7 @@
"import os\n", "import os\n",
"import pandas as pd\n", "import pandas as pd\n",
"import numpy as np\n", "import numpy as np\n",
"from scipy.signal import hann\n", "from scipy.signal.windows import hann\n",
"import multiprocessing" "import multiprocessing"
] ]
}, },
@@ -244,16 +244,6 @@
"Fs = 1024" "Fs = 1024"
] ]
}, },
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Define the base directory where DAMAGE_X folders are located\n",
"damage_base_path = 'D:/thesis/data/converted/raw'"
]
},
{ {
"cell_type": "markdown", "cell_type": "markdown",
"metadata": {}, "metadata": {},
@@ -282,6 +272,13 @@
"damage_base_path = 'D:/thesis/data/converted/raw'" "damage_base_path = 'D:/thesis/data/converted/raw'"
] ]
}, },
{
"cell_type": "markdown",
"metadata": {},
"source": [
"The path above is specifically intended to process STFT data using Dataset A"
]
},
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": null,
@@ -290,8 +287,8 @@
"source": [ "source": [
"# Define output directories for each sensor exported data\n", "# Define output directories for each sensor exported data\n",
"output_dirs = {\n", "output_dirs = {\n",
" 'sensor1': os.path.join(damage_base_path, 'sensor1'),\n", " 'sensorA': os.path.join(damage_base_path, 'sensorA'),\n",
" 'sensor2': os.path.join(damage_base_path, 'sensor2')\n", " 'sensorB': os.path.join(damage_base_path, 'sensorB')\n",
"}" "}"
] ]
}, },
@@ -308,7 +305,7 @@
"\n", "\n",
"with multiprocessing.Pool() as pool:\n", "with multiprocessing.Pool() as pool:\n",
" # Process each DAMAGE_X case in parallel\n", " # Process each DAMAGE_X case in parallel\n",
" pool.map(process_damage_case, range(num_damage_cases), Fs, window_size, hop_size, output_dirs)" " pool.map(process_damage_case, range(num_damage_cases))"
] ]
}, },
{ {
@@ -345,7 +342,7 @@
"import os\n", "import os\n",
"import pandas as pd\n", "import pandas as pd\n",
"\n", "\n",
"ready_data1a = []\n", "ready_data1a: list[pd.DataFrame] = []\n",
"for file in os.listdir('D:/thesis/data/converted/raw/sensor1'):\n", "for file in os.listdir('D:/thesis/data/converted/raw/sensor1'):\n",
" ready_data1a.append(pd.read_csv(os.path.join('D:/thesis/data/converted/raw/sensor1', file), skiprows=1))" " ready_data1a.append(pd.read_csv(os.path.join('D:/thesis/data/converted/raw/sensor1', file), skiprows=1))"
] ]
@@ -357,7 +354,7 @@
"outputs": [], "outputs": [],
"source": [ "source": [
"# Load the processed data for Sensor 2\n", "# Load the processed data for Sensor 2\n",
"ready_data2a = []\n", "ready_data2a: list[pd.DataFrame] = []\n",
"for file in os.listdir('D:/thesis/data/converted/raw/sensor2'):\n", "for file in os.listdir('D:/thesis/data/converted/raw/sensor2'):\n",
" ready_data2a.append(pd.read_csv(os.path.join('D:/thesis/data/converted/raw/sensor2', file), skiprows=1))" " ready_data2a.append(pd.read_csv(os.path.join('D:/thesis/data/converted/raw/sensor2', file), skiprows=1))"
] ]
@@ -405,7 +402,14 @@
"cell_type": "markdown", "cell_type": "markdown",
"metadata": {}, "metadata": {},
"source": [ "source": [
"#### Preview Plot" "##### Preview Plot"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"###### Function plot"
] ]
}, },
{ {
@@ -414,7 +418,74 @@
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [],
"source": [ "source": [
"import matplotlib.pyplot as plt" "import matplotlib.pyplot as plt\n",
"from typing import Union"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def preview_stft(data: Union[pd.DataFrame, list[pd.DataFrame]], x_num_ticks: int = 6, y_num_ticks: int = 5, cols: int=3):\n",
" \"\"\"\n",
" Preview the Short-Time Fourier Transform (STFT) of the given data.\n",
"\n",
" Parameters:\n",
" -------\n",
" data (pd.DataFrame): The STFT data to be visualized.\n",
" x_num_ticks (int): Number of ticks on the x-axis (time frames). Defaults to 6.\n",
" y_num_ticks (int): Number of ticks on the y-axis (frequency bins). Defaults to 5.\n",
" \"\"\"\n",
" if type(data) == pd.DataFrame:\n",
" plt.figure(dpi=300) # Set figure size and DPI\n",
" plt.pcolormesh(data.transpose(), cmap='jet', vmax=0.03, vmin=0.0, rasterized=True)\n",
" # plt.title('STFT Preview')\n",
" plt.colorbar(label='Magnitude ($m/s^2$)')\n",
" plt.xlabel('Segmen Waktu')\n",
" plt.ylabel('Sampel Frekuensi (Hz)')\n",
" plt.xticks(np.linspace(0, len(data)-1, x_num_ticks)) # Set x-ticks at regular intervals\n",
" plt.yticks(np.linspace(0, len(data.columns)-1, y_num_ticks)) # Set y-ticks at regular intervals\n",
" plt.savefig(\"output_single.svg\", format=\"svg\", dpi=100)\n",
" plt.show()\n",
"\n",
" elif type(data) == list and len(data) > 1:\n",
" # Create a figure and subplots\n",
"    fig, axes = plt.subplots(-(-len(data) // cols), cols, figsize=(15, 8), sharex=True, sharey=True)  # ceil division so a non-multiple of cols still gets enough axes\n",
"\n",
" # Flatten the axes array for easier iteration\n",
" axes = axes.flatten()\n",
"\n",
" # Loop through each subplot and plot the data\n",
" for i in range(len(data)):\n",
" pcm = axes[i].pcolormesh(data[i].transpose(), cmap='jet', vmax=0.03, vmin=0.0, rasterized=True)\n",
" axes[i].set_title(f'd_{i+1}', fontsize=12)\n",
" axes[i].set_xticks(np.linspace(0, len(data[i])-1, 6)) # Set x-ticks at regular intervals\n",
" axes[i].set_yticks(np.linspace(0, len(data[i].columns)-1, 9)) # Set y-ticks at regular intervals\n",
"\n",
" # Add a single color bar for all subplots\n",
" # Use the first `pcolormesh` object (or any valid one) for the color bar\n",
" cbar = fig.colorbar(pcm, ax=axes, orientation='vertical')\n",
" cbar.set_label('Magnitude ($m/s^2$)', fontsize=12)\n",
"\n",
" # Set shared labels\n",
" fig.text(0.5, 0.04, 'Segmen Waktu', ha='center', fontsize=12)\n",
" fig.text(0.04, 0.5, 'Sampel Frekuensi (Hz)', va='center', rotation='vertical', fontsize=12)\n",
" # fig.suptitle('STFT of Sensor A Damage Case (d1--d6) Dataset A', fontsize=16)\n",
"\n",
" # Adjust layout\n",
" # plt.tight_layout(rect=[0.05, 0.05, 1, 1]) # Leave space for shared labels\n",
" plt.subplots_adjust(left=0.1, right=0.75, top=0.9, bottom=0.1, wspace=0.2, hspace=0.2)\n",
" plt.savefig(\"output_multiple.svg\", format=\"svg\", dpi=80)\n",
" plt.show()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"###### Sensor A"
] ]
}, },
{ {
@@ -425,10 +496,14 @@
"source": [ "source": [
"# colormesh give title x is frequency and y is time and rotate/transpose the data\n", "# colormesh give title x is frequency and y is time and rotate/transpose the data\n",
"# Plotting the STFT Data\n", "# Plotting the STFT Data\n",
"plt.figure(dpi=300) # Set figure size and DPI\n", "preview_stft(ready_data1a[0], y_num_ticks=9) # Preview for Sensor 1"
"plt.pcolormesh(ready_data1a[0].transpose(), cmap='jet', vmax=0.03, vmin=0.0)\n", ]
"plt.title('STFT of Sensor A Dataset A Label 0 Undamaged')\n", },
"plt.show()" {
"cell_type": "markdown",
"metadata": {},
"source": [
"###### Sensor B"
] ]
}, },
{ {
@@ -437,10 +512,7 @@
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [],
"source": [ "source": [
"plt.figure(dpi=300) # Set figure size and DPI\n", "preview_stft(ready_data2a[0], y_num_ticks=9) # Preview for Sensor 2"
"plt.pcolormesh(ready_data2a[0].transpose(), cmap='jet', vmax=0.03, vmin=0.0)\n",
"plt.title('STFT of Sensor B Dataset A Label 0 Undamaged')\n",
"plt.show()"
] ]
}, },
{ {
@@ -461,7 +533,14 @@
"cell_type": "markdown", "cell_type": "markdown",
"metadata": {}, "metadata": {},
"source": [ "source": [
"#### Preview Plot" "##### Preview Plot"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"###### Sensor A"
] ]
}, },
{ {
@@ -470,33 +549,14 @@
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [],
"source": [ "source": [
"import matplotlib.pyplot as plt\n", "preview_stft(ready_data1a[1:], cols=3, y_num_ticks=9) # Preview for Sensor 1 Damage Cases (d1--d6)"
"# Create a figure and subplots\n", ]
"fig, axes = plt.subplots(2, 3, figsize=(15, 8), sharex=True, sharey=True)\n", },
"\n", {
"# Flatten the axes array for easier iteration\n", "cell_type": "markdown",
"axes = axes.flatten()\n", "metadata": {},
"\n", "source": [
"# Loop through each subplot and plot the data\n", "###### Sensor B"
"for i in range(6):\n",
" pcm = axes[i].pcolormesh(ready_data1a[i+1].transpose(), cmap='jet', vmax=0.03, vmin=0.0)\n",
" axes[i].set_title(f'Label {i+1}', fontsize=12)\n",
"\n",
"# Add a single color bar for all subplots\n",
"# Use the first `pcolormesh` object (or any valid one) for the color bar\n",
"cbar = fig.colorbar(pcm, ax=axes, orientation='vertical')\n",
"# cbar.set_label('Magnitude')\n",
"\n",
"# Set shared labels\n",
"fig.text(0.5, 0.04, 'Time Frames', ha='center', fontsize=12)\n",
"fig.text(0.04, 0.5, 'Frequency [Hz]', va='center', rotation='vertical', fontsize=12)\n",
"fig.suptitle('STFT of Sensor A Damage Case (d1--d6) Dataset A', fontsize=16)\n",
"\n",
"# Adjust layout\n",
"# plt.tight_layout(rect=[0.05, 0.05, 1, 1]) # Leave space for shared labels\n",
"plt.subplots_adjust(left=0.1, right=0.75, top=0.9, bottom=0.1, wspace=0.2, hspace=0.2)\n",
"\n",
"plt.show()"
] ]
}, },
{ {
@@ -505,33 +565,7 @@
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [],
"source": [ "source": [
"from cmcrameri import cm\n", "preview_stft(ready_data2a[1:], cols=3, y_num_ticks=9) # Preview for Sensor 2 Damage Cases (d1--d6)"
"# Create a figure and subplots\n",
"fig, axes = plt.subplots(2, 3, figsize=(15, 8), sharex=True, sharey=True)\n",
"\n",
"# Flatten the axes array for easier iteration\n",
"axes = axes.flatten()\n",
"\n",
"# Loop through each subplot and plot the data\n",
"for i in range(6):\n",
" pcm = axes[i].pcolormesh(ready_data2a[i+1].transpose(), cmap='jet', vmax=0.03, vmin=0.0)\n",
" axes[i].set_title(f'Label {i+1}', fontsize=12)\n",
"\n",
"# Add a single color bar for all subplots\n",
"# Use the first `pcolormesh` object (or any valid one) for the color bar\n",
"cbar = fig.colorbar(pcm, ax=axes, orientation='vertical')\n",
"# cbar.set_label('Magnitude')\n",
"\n",
"# Set shared labels\n",
"fig.text(0.5, 0.04, 'Time Frames', ha='center', fontsize=12)\n",
"fig.text(0.04, 0.5, 'Frequency [Hz]', va='center', rotation='vertical', fontsize=12)\n",
"fig.suptitle('STFT of Sensor B Damage Case (d1--d6) Dataset A', fontsize=16)\n",
"\n",
"# Adjust layout\n",
"# plt.tight_layout(rect=[0.05, 0.05, 1, 1]) # Leave space for shared labels\n",
"plt.subplots_adjust(left=0.1, right=0.75, top=0.9, bottom=0.1, wspace=0.2, hspace=0.2)\n",
"\n",
"plt.show()"
] ]
}, },
{ {
@@ -792,8 +826,8 @@
"source": [ "source": [
"from src.ml.model_selection import create_ready_data\n", "from src.ml.model_selection import create_ready_data\n",
"\n", "\n",
"X1b, y = create_ready_data('D:/thesis/data/converted/raw_B/sensor1') # sensor A\n", "X1b, y1 = create_ready_data('D:/thesis/data/converted/raw_B/sensor1') # sensor A\n",
"X2b, y = create_ready_data('D:/thesis/data/converted/raw_B/sensor2') # sensor B" "X2b, y2 = create_ready_data('D:/thesis/data/converted/raw_B/sensor2') # sensor B"
] ]
}, },
{ {
@@ -810,6 +844,17 @@
"#### Sensor A" "#### Sensor A"
] ]
}, },
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Rename first column using proper pandas method\n",
"X1b = X1b.rename(columns={X1b.columns[0]: \"Freq_0.00\"})\n",
"X2b = X2b.rename(columns={X2b.columns[0]: \"Freq_0.00\"})"
]
},
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": null,
@@ -818,8 +863,27 @@
"source": [ "source": [
"# 4. Sensor A Validate on Dataset B\n", "# 4. Sensor A Validate on Dataset B\n",
"from joblib import load\n", "from joblib import load\n",
"svm_model = load('D:/thesis/models/Sensor A/SVM with StandardScaler and PCA.joblib')\n", "from sklearn.svm import SVC\n",
"y_pred_svm_1 = svm_model.predict_proba(X1b)" "svm_model: SVC = load('D:/thesis/models/Sensor A/SVM with StandardScaler and PCA.joblib')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import time\n",
"\n",
"time_taken = np.array([])\n",
"for i in range(5): # Run multiple times to get an average time\n",
" start_time = time.time()\n",
" y_pred_svm_1 = svm_model.predict(X1b)\n",
" end_time = time.time()\n",
" time_taken = np.append(time_taken, end_time - start_time)\n",
"\n",
"print(time_taken)\n",
"print(time_taken.mean())\n"
] ]
}, },
{ {
@@ -831,9 +895,7 @@
"import numpy as np\n", "import numpy as np\n",
"\n", "\n",
"# Set NumPy to display full decimal values\n", "# Set NumPy to display full decimal values\n",
"np.set_printoptions(suppress=True, precision=6) # Suppress scientific notation, set precision to 6 decimals\n", "np.set_printoptions(suppress=True, precision=6) # Suppress scientific notation, set precision to 6 decimals"
"\n",
"y_pred_svm_1[1]"
] ]
}, },
{ {
@@ -845,37 +907,14 @@
"from sklearn.metrics import accuracy_score, classification_report\n", "from sklearn.metrics import accuracy_score, classification_report\n",
"\n", "\n",
"# 5. Evaluate\n", "# 5. Evaluate\n",
"print(\"Accuracy on Dataset B:\", accuracy_score(y, y_pred_svm_1))\n", "print(\"Accuracy on Dataset B:\", accuracy_score(y1, y_pred_svm_1))\n",
"print(classification_report(y, y_pred_svm_1))" "df = pd.DataFrame(classification_report(y1, y_pred_svm_1, output_dict=True)).T\n",
] "# Round numbers nicely and move 'accuracy' into a row that fits your desired layout\n",
}, "df_rounded = df.round(2)\n",
{
"cell_type": "markdown",
"metadata": {},
"source": [
"#### Confusion Matrix Sensor A"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import matplotlib.pyplot as plt\n",
"from sklearn.metrics import confusion_matrix, ConfusionMatrixDisplay\n",
"\n", "\n",
"\n", "# Export to LaTeX\n",
"cm = confusion_matrix(y, y_pred_svm_1) # -> ndarray\n", "latex_table = df_rounded.to_latex(index=True, float_format=\"%.2f\", caption=\"Classification report on Dataset B\", label=\"tab:clf_report_auto\")\n",
"\n", "print(latex_table)"
"# get the class labels\n",
"labels = svm_model.classes_\n",
"\n",
"# Plot\n",
"disp = ConfusionMatrixDisplay(confusion_matrix=cm, display_labels=labels)\n",
"disp.plot(cmap=plt.cm.Blues) # You can change colormap\n",
"plt.title(\"Confusion Matrix of Sensor A Test on Dataset B\")\n",
"plt.show()"
] ]
}, },
{ {
@@ -891,20 +930,34 @@
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [],
"source": [ "source": [
"svm_model = load('D:/thesis/models/sensor2/SVM.joblib')\n", "# svm_model = load('D:/thesis/models/sensor2/SVM.joblib')\n",
"# svm_model = load('D:/thesis/models/sensor2/SVM with StandardScaler and PCA.joblib')\n", "svm_model = load('D:/thesis/models/sensor2/SVM with StandardScaler and PCA.joblib')\n",
"y_pred_svm_2 = svm_model.predict(X2b)\n", "y_pred_svm_2 = svm_model.predict(X2b)"
"\n", ]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# 5. Evaluate\n", "# 5. Evaluate\n",
"print(\"Accuracy on Dataset B:\", accuracy_score(y, y_pred_svm_2))\n", "import pandas as pd\n",
"print(classification_report(y, y_pred_svm_2))" "\n",
"df = pd.DataFrame(classification_report(y2, y_pred_svm_2, output_dict=True)).T\n",
"# Round numbers nicely and move 'accuracy' into a row that fits your desired layout\n",
"df_rounded = df.round(2)\n",
"\n",
"# Export to LaTeX\n",
"latex_table = df_rounded.to_latex(index=True, float_format=\"%.2f\", caption=\"Classification report on Dataset B\", label=\"tab:clf_report_auto\")\n",
"print(latex_table)"
] ]
}, },
{ {
"cell_type": "markdown", "cell_type": "markdown",
"metadata": {}, "metadata": {},
"source": [ "source": [
"#### Confusion Matrix Sensor B" "#### Confusion Matrix Sensor A and B"
] ]
}, },
{ {
@@ -915,17 +968,54 @@
"source": [ "source": [
"import matplotlib.pyplot as plt\n", "import matplotlib.pyplot as plt\n",
"from sklearn.metrics import confusion_matrix, ConfusionMatrixDisplay\n", "from sklearn.metrics import confusion_matrix, ConfusionMatrixDisplay\n",
"import numpy as np\n",
"\n", "\n",
"# Create a fresh figure with subplots\n",
"fig, axes = plt.subplots(1, 2, figsize=(12, 5))\n",
"\n", "\n",
"cm = confusion_matrix(y, y_pred_svm_2) # -> ndarray\n", "# Plot confusion matrix for Sensor A\n",
"labels = svm_model.classes_  # class labels (same for both sensor models)\n",
"cm_A = confusion_matrix(y1, y_pred_svm_1)\n",
"disp_A = ConfusionMatrixDisplay(confusion_matrix=cm_A, display_labels=labels)\n",
"disp_A.plot(ax=axes[0], cmap=plt.cm.Blues)\n",
"axes[0].set_title(\"Sensor A\")\n",
"\n", "\n",
"# get the class labels\n", "# Plot confusion matrix for Sensor B\n",
"labels = svm_model.classes_\n", "cm_B = confusion_matrix(y2, y_pred_svm_2)\n",
"disp_B = ConfusionMatrixDisplay(confusion_matrix=cm_B, display_labels=labels)\n",
"disp_B.plot(ax=axes[1], cmap=plt.cm.Blues)\n",
"axes[1].set_title(\"Sensor B\")\n",
"\n", "\n",
"# Plot\n", "# Find and modify colorbars to show max values\n",
"disp = ConfusionMatrixDisplay(confusion_matrix=cm, display_labels=labels)\n", "# The colorbars are typically the 3rd and 4th axes in the figure\n",
"disp.plot(cmap=plt.cm.Blues) # You can change colormap\n", "for i, (cbar_idx, cm) in enumerate(zip([2, 3], [cm_A, cm_B])):\n",
"plt.title(\"Confusion Matrix of Sensor B Test on Dataset B\")\n", " if cbar_idx < len(fig.axes):\n",
" cbar_ax = fig.axes[cbar_idx]\n",
" \n",
" # Get max value from the confusion matrix\n",
" max_val = cm.max()\n",
" \n",
" # Create a new set of ticks with reasonable spacing and ending with max_val\n",
" # For example, if max is around 2560, create ticks: [0, 500, 1000, 1500, 2000, 2560]\n",
" tick_interval = 500\n",
" new_ticks = list(range(0, int(max_val), tick_interval))\n",
" if np.isclose(new_ticks[-1], max_val, rtol=0.05):\n",
" new_ticks[-1] = max_val \n",
" else:\n",
" new_ticks.extend([max_val])\n",
" # Set the new ticks\n",
" cbar_ax.set_yticks(new_ticks)\n",
" \n",
" # Format tick labels as integers\n",
" # cbar_ax.set_yticklabels([f\"{int(t)}\" if t.is_integer() else f\"{t:.1f}\" for t in new_ticks])\n",
"\n",
"# Set SVG font rendering for better PDF output\n",
"plt.rcParams['svg.fonttype'] = 'none'\n",
"\n",
"# Adjust layout\n",
"plt.tight_layout()\n",
"\n",
"# Save and show\n",
"plt.savefig(\"output.svg\")\n",
"plt.show()" "plt.show()"
] ]
}, },
@@ -952,6 +1042,100 @@
"print(\"Accuracy on Dataset B:\", accuracy_score(y, y_pred_svm))\n", "print(\"Accuracy on Dataset B:\", accuracy_score(y, y_pred_svm))\n",
"print(classification_report(y, y_pred_svm))" "print(classification_report(y, y_pred_svm))"
] ]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Test with AU"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import pandas as pd\n",
"\n",
"file_path = 'D:/thesis/data/dataset_B/zzzBU.TXT'\n",
"df = pd.read_csv(file_path, sep=r'\\s+', skiprows=10, header=0, memory_map=True)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"df1= df.iloc[:, [1]]\n",
"df2 = df.iloc[:, [26]]"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from scipy.signal.windows import hann\n",
"from scipy.signal import stft\n",
"\n",
"window = 1024\n",
"hop = 512\n",
"\n",
"_, _, stft1 = stft(df1.values.flatten(), window=hann(window), nperseg=window, noverlap=hop, fs=window)\n",
"_, _, stft2 = stft(df2.values.flatten(), window=hann(window), nperseg=window, noverlap=hop, fs=window)\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# stft returns Zxx with shape (freq_bins, time); after .T the column count is shape[0]\n",
"a = pd.DataFrame(np.abs(stft1.T), columns=[f\"Freq_{freq:.2f}\" for freq in np.linspace(0, window/2, stft1.shape[0])])\n",
"a = a.rename(columns={a.columns[0]: \"00\"})\n",
"b = pd.DataFrame(np.abs(stft2.T), columns=[f\"Freq_{freq:.2f}\" for freq in np.linspace(0, window/2, stft2.shape[0])])\n",
"b = b.rename(columns={b.columns[0]: \"00\"})"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"y = [0]*len(a)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from joblib import load\n",
"\n",
"svm_model_a = load('D:/thesis/models/sensor1/SVM with StandardScaler and PCA.joblib')\n",
"svm_model_b = load('D:/thesis/models/sensor2/SVM with StandardScaler and PCA.joblib')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"y_pred_svm = svm_model_b.predict(b)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"y_pred_svm"
]
} }
], ],
"metadata": { "metadata": {