Compare commits
58 Commits
feature/48
...
latex/lite
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
bffdbcaec2 | ||
|
|
bedc61a467 | ||
|
|
8adfa60519 | ||
|
|
1dcbb4cda8 | ||
|
|
38138c7c98 | ||
|
|
2016f4e780 | ||
|
|
0cf2834095 | ||
|
|
d5f43ba48b | ||
|
|
a872ae144c | ||
|
|
3d2432f26e | ||
|
|
1533a6ce46 | ||
|
|
471eccb12c | ||
|
|
74ea4d412c | ||
|
|
9d526971d9 | ||
|
|
49adb273d8 | ||
|
|
ee004e55f4 | ||
|
|
f97c58e114 | ||
|
|
190cd0904e | ||
|
|
6105da57f0 | ||
|
|
63da3b6308 | ||
|
|
5634776d26 | ||
|
|
e65026f9ca | ||
|
|
eb1d2a87b4 | ||
|
|
1f275fad42 | ||
|
|
07ed6a9a13 | ||
|
|
1b20376700 | ||
|
|
104b72e624 | ||
|
|
e9568583e4 | ||
|
|
ae201d61fa | ||
|
|
921dc9245c | ||
|
|
bf3c43639d | ||
|
|
f38d44df1d | ||
|
|
5c70d7db51 | ||
|
|
702760cc5e | ||
|
|
43a0f40182 | ||
|
|
92a7143d90 | ||
|
|
5e08d4f8c6 | ||
|
|
907f725fa7 | ||
|
|
676b2b1a87 | ||
|
|
e0fbc23257 | ||
|
|
39f966e71b | ||
|
|
740680d1c7 | ||
|
|
2db5170366 | ||
|
|
f83b890055 | ||
|
|
7820dd580a | ||
|
|
6c0fb67b86 | ||
|
|
792ed64027 | ||
|
|
c57a916a1a | ||
|
|
ca668ffc5f | ||
|
|
8d09adefd4 | ||
|
|
05926e3857 | ||
|
|
d13dfdc34e | ||
|
|
6b866b9ed5 | ||
|
|
4a796694bf | ||
|
|
6357136e6c | ||
|
|
c7584e2dd8 | ||
|
|
80ee9a3ec4 | ||
|
|
f9f346a57e |
3
.gitignore
vendored
3
.gitignore
vendored
@@ -1,5 +1,4 @@
|
||||
# Ignore CSV files in the data directory and all its subdirectories
|
||||
data/**/*.csv
|
||||
.venv/
|
||||
*.pyc
|
||||
*.egg-info/
|
||||
*.pyc
|
||||
3
.vscode/settings.json
vendored
3
.vscode/settings.json
vendored
@@ -1,4 +1,3 @@
|
||||
{
|
||||
"python.analysis.extraPaths": ["./code/src/features"],
|
||||
"jupyter.notebookFileRoot": "${workspaceFolder}/code"
|
||||
"python.analysis.extraPaths": ["./code/src/features"]
|
||||
}
|
||||
|
||||
@@ -16,8 +16,3 @@ The repository is private and access is restricted only to those who have been g
|
||||
All contents of this repository, including the thesis idea, code, and associated data, are copyrighted © 2024 by Rifqi Panuluh. Unauthorized use or duplication is prohibited.
|
||||
|
||||
[LICENSE](https://github.com/nuluh/thesis?tab=License-1-ov-file#readme)
|
||||
|
||||
## How to Run `stft.ipynb`
|
||||
|
||||
1. run `pip install -e .` in root project first
|
||||
2. run the notebook
|
||||
@@ -121,9 +121,8 @@
|
||||
"signal_sensor2_test1 = []\n",
|
||||
"\n",
|
||||
"for data in df:\n",
|
||||
" if not data.empty and 'sensor 1' in data.columns and 'sensor 2' in data.columns:\n",
|
||||
" signal_sensor1_test1.append(data['sensor 1'].values)\n",
|
||||
" signal_sensor2_test1.append(data['sensor 2'].values)\n",
|
||||
" signal_sensor1_test1.append(data['sensor 1'].values)\n",
|
||||
" signal_sensor2_test1.append(data['sensor 2'].values)\n",
|
||||
"\n",
|
||||
"print(len(signal_sensor1_test1))\n",
|
||||
"print(len(signal_sensor2_test1))"
|
||||
@@ -155,7 +154,9 @@
|
||||
"import pandas as pd\n",
|
||||
"import numpy as np\n",
|
||||
"from scipy.signal import stft, hann\n",
|
||||
"# from multiprocessing import Pool\n",
|
||||
"from multiprocessing import Pool\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"# Function to compute and append STFT data\n",
|
||||
"def process_stft(args):\n",
|
||||
@@ -198,22 +199,23 @@
|
||||
" # Compute STFT\n",
|
||||
" frequencies, times, Zxx = stft(sensor_data, fs=Fs, window=window, nperseg=window_size, noverlap=window_size - hop_size)\n",
|
||||
" magnitude = np.abs(Zxx)\n",
|
||||
" df_stft = pd.DataFrame(magnitude, index=frequencies, columns=times).T\n",
|
||||
" df_stft.columns = [f\"Freq_{i}\" for i in frequencies]\n",
|
||||
" flattened_stft = magnitude.flatten()\n",
|
||||
" \n",
|
||||
" # Define the output CSV file path\n",
|
||||
" stft_file_name = f'stft_data{sensor_num}_{damage_num}.csv'\n",
|
||||
" sensor_output_dir = os.path.join(damage_base_path, sensor_name.lower())\n",
|
||||
" os.makedirs(sensor_output_dir, exist_ok=True)\n",
|
||||
" stft_file_path = os.path.join(sensor_output_dir, stft_file_name)\n",
|
||||
" print(stft_file_path)\n",
|
||||
" # Append the flattened STFT to the CSV\n",
|
||||
" try:\n",
|
||||
" flattened_stft_df = pd.DataFrame([flattened_stft])\n",
|
||||
" if not os.path.isfile(stft_file_path):\n",
|
||||
" # Create a new CSV\n",
|
||||
" df_stft.to_csv(stft_file_path, index=False, header=False)\n",
|
||||
" flattened_stft_df.to_csv(stft_file_path, index=False, header=False)\n",
|
||||
" else:\n",
|
||||
" # Append to existing CSV\n",
|
||||
" df_stft.to_csv(stft_file_path, mode='a', index=False, header=False)\n",
|
||||
" flattened_stft_df.to_csv(stft_file_path, mode='a', index=False, header=False)\n",
|
||||
" print(f\"Appended STFT data to {stft_file_path}\")\n",
|
||||
" except Exception as e:\n",
|
||||
" print(f\"Error writing to {stft_file_path}: {e}\")"
|
||||
@@ -293,7 +295,7 @@
|
||||
"\n",
|
||||
"# get current y ticks in list\n",
|
||||
"print(len(frequencies))\n",
|
||||
"print(len(times))"
|
||||
"print(len(times))\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -321,9 +323,10 @@
|
||||
"source": [
|
||||
"import pandas as pd\n",
|
||||
"import matplotlib.pyplot as plt\n",
|
||||
"ready_data1a = []\n",
|
||||
"ready_data1 = []\n",
|
||||
"for file in os.listdir('D:/thesis/data/converted/raw/sensor1'):\n",
|
||||
" ready_data1a.append(pd.read_csv(os.path.join('D:/thesis/data/converted/raw/sensor1', file)))\n",
|
||||
" ready_data1.append(pd.read_csv(os.path.join('D:/thesis/data/converted/raw/sensor1', file)))\n",
|
||||
"ready_data1[0]\n",
|
||||
"# colormesh give title x is frequency and y is time and rotate/transpose the data\n",
|
||||
"# Plotting the STFT Data"
|
||||
]
|
||||
@@ -334,8 +337,8 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"len(ready_data1a)\n",
|
||||
"# plt.pcolormesh(ready_data1[0])"
|
||||
"ready_data1[1]\n",
|
||||
"plt.pcolormesh(ready_data1[1])"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -345,7 +348,7 @@
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"for i in range(6):\n",
|
||||
" plt.pcolormesh(ready_data1a[i])\n",
|
||||
" plt.pcolormesh(ready_data1[i])\n",
|
||||
" plt.title(f'STFT Magnitude for case {i} sensor 1')\n",
|
||||
" plt.xlabel(f'Frequency [Hz]')\n",
|
||||
" plt.ylabel(f'Time [sec]')\n",
|
||||
@@ -358,9 +361,10 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"ready_data2a = []\n",
|
||||
"ready_data2 = []\n",
|
||||
"for file in os.listdir('D:/thesis/data/converted/raw/sensor2'):\n",
|
||||
" ready_data2a.append(pd.read_csv(os.path.join('D:/thesis/data/converted/raw/sensor2', file)))"
|
||||
" ready_data2.append(pd.read_csv(os.path.join('D:/thesis/data/converted/raw/sensor2', file)))\n",
|
||||
"ready_data2[5]"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -369,8 +373,8 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"print(len(ready_data1a))\n",
|
||||
"print(len(ready_data2a))"
|
||||
"print(len(ready_data1))\n",
|
||||
"print(len(ready_data2))"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -379,16 +383,35 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"x1a = 0\n",
|
||||
"print(type(ready_data1a[0]))\n",
|
||||
"ready_data1a[0].iloc[:,0]"
|
||||
"x1 = 0\n",
|
||||
"\n",
|
||||
"for i in range(len(ready_data1)):\n",
|
||||
" print(ready_data1[i].shape)\n",
|
||||
" x1 = x1 + ready_data1[i].shape[0]\n",
|
||||
"\n",
|
||||
"print(x1)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"x2 = 0\n",
|
||||
"\n",
|
||||
"for i in range(len(ready_data2)):\n",
|
||||
" print(ready_data2[i].shape)\n",
|
||||
" x2 = x2 + ready_data2[i].shape[0]\n",
|
||||
"\n",
|
||||
"print(x2)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"#### Checking length of the total array"
|
||||
"### Appending"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -397,14 +420,14 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"x1a = 0\n",
|
||||
"print(type(x1a))\n",
|
||||
"for i in range(len(ready_data1a)):\n",
|
||||
" print(type(ready_data1a[i].shape[0]))\n",
|
||||
" x1a = x1a + ready_data1a[i].shape[0]\n",
|
||||
" print(type(x1a))\n",
|
||||
"\n",
|
||||
"print(x1a)"
|
||||
"x1 = ready_data1[0]\n",
|
||||
"# print(x1)\n",
|
||||
"print(type(x1))\n",
|
||||
"for i in range(len(ready_data1) - 1):\n",
|
||||
" #print(i)\n",
|
||||
" x1 = np.concatenate((x1, ready_data1[i + 1]), axis=0)\n",
|
||||
"# print(x1)\n",
|
||||
"pd.DataFrame(x1)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -413,75 +436,29 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"x2a = 0\n",
|
||||
"x2 = ready_data2[0]\n",
|
||||
"\n",
|
||||
"for i in range(len(ready_data2a)):\n",
|
||||
" print(ready_data2a[i].shape)\n",
|
||||
" x2a = x2a + ready_data2a[i].shape[0]\n",
|
||||
"\n",
|
||||
"print(x2a)"
|
||||
"for i in range(len(ready_data2) - 1):\n",
|
||||
" #print(i)\n",
|
||||
" x2 = np.concatenate((x2, ready_data2[i + 1]), axis=0)\n",
|
||||
"pd.DataFrame(x2)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"print(x1.shape)\n",
|
||||
"print(x2.shape)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### Flatten 6 array into one array"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Combine all dataframes in ready_data1a into a single dataframe\n",
|
||||
"if ready_data1a: # Check if the list is not empty\n",
|
||||
" # Use pandas concat function instead of iterative concatenation\n",
|
||||
" combined_data = pd.concat(ready_data1a, axis=0, ignore_index=True)\n",
|
||||
" \n",
|
||||
" print(f\"Type of combined data: {type(combined_data)}\")\n",
|
||||
" print(f\"Shape of combined data: {combined_data.shape}\")\n",
|
||||
" \n",
|
||||
" # Display the combined dataframe\n",
|
||||
" combined_data\n",
|
||||
"else:\n",
|
||||
" print(\"No data available in ready_data1a list\")\n",
|
||||
" combined_data = pd.DataFrame()\n",
|
||||
"\n",
|
||||
"# Store the result in x1a for compatibility with subsequent code\n",
|
||||
"x1a = combined_data"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Combine all dataframes in ready_data1a into a single dataframe\n",
|
||||
"if ready_data2a: # Check if the list is not empty\n",
|
||||
" # Use pandas concat function instead of iterative concatenation\n",
|
||||
" combined_data = pd.concat(ready_data2a, axis=0, ignore_index=True)\n",
|
||||
" \n",
|
||||
" print(f\"Type of combined data: {type(combined_data)}\")\n",
|
||||
" print(f\"Shape of combined data: {combined_data.shape}\")\n",
|
||||
" \n",
|
||||
" # Display the combined dataframe\n",
|
||||
" combined_data\n",
|
||||
"else:\n",
|
||||
" print(\"No data available in ready_data1a list\")\n",
|
||||
" combined_data = pd.DataFrame()\n",
|
||||
"\n",
|
||||
"# Store the result in x1a for compatibility with subsequent code\n",
|
||||
"x2a = combined_data"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### Creating the label"
|
||||
"### Labeling"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -504,8 +481,7 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"y_data = [y_1, y_2, y_3, y_4, y_5, y_6]\n",
|
||||
"y_data"
|
||||
"y_data = [y_1, y_2, y_3, y_4, y_5, y_6]"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -515,7 +491,7 @@
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"for i in range(len(y_data)):\n",
|
||||
" print(ready_data1a[i].shape[0])"
|
||||
" print(ready_data1[i].shape[0])"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -524,9 +500,19 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import numpy as np\n",
|
||||
"for i in range(len(y_data)):\n",
|
||||
" y_data[i] = [y_data[i]]*ready_data1a[i].shape[0]"
|
||||
" print(ready_data2[i].shape[0])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"for i in range(len(y_data)):\n",
|
||||
" y_data[i] = [y_data[i]]*ready_data1[i].shape[0]\n",
|
||||
" y_data[i] = np.array(y_data[i])"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -536,7 +522,7 @@
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# len(y_data[0])\n",
|
||||
"y_data"
|
||||
"y_data[0]"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -568,10 +554,10 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from src.ml.model_selection import create_ready_data\n",
|
||||
"from sklearn.model_selection import train_test_split\n",
|
||||
"\n",
|
||||
"X1a, y = create_ready_data('D:/thesis/data/converted/raw/sensor1')\n",
|
||||
"X2a, y = create_ready_data('D:/thesis/data/converted/raw/sensor2')"
|
||||
"x_train1, x_test1, y_train, y_test = train_test_split(x1, y, test_size=0.2, random_state=2)\n",
|
||||
"x_train2, x_test2, y_train, y_test = train_test_split(x2, y, test_size=0.2, random_state=2)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -581,17 +567,6 @@
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from sklearn.model_selection import train_test_split\n",
|
||||
"\n",
|
||||
"x_train1, x_test1, y_train, y_test = train_test_split(X1a, y, test_size=0.2, random_state=2)\n",
|
||||
"x_train2, x_test2, y_train, y_test = train_test_split(X2a, y, test_size=0.2, random_state=2)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from sklearn.metrics import accuracy_score\n",
|
||||
"from sklearn.ensemble import RandomForestClassifier, BaggingClassifier\n",
|
||||
"from sklearn.tree import DecisionTreeClassifier\n",
|
||||
@@ -624,17 +599,16 @@
|
||||
"\n",
|
||||
"\n",
|
||||
"# 1. Random Forest\n",
|
||||
"rf_model1 = RandomForestClassifier()\n",
|
||||
"rf_model1.fit(x_train1, y_train)\n",
|
||||
"rf_pred1 = rf_model1.predict(x_test1)\n",
|
||||
"rf_model = RandomForestClassifier()\n",
|
||||
"rf_model.fit(x_train1, y_train)\n",
|
||||
"rf_pred1 = rf_model.predict(x_test1)\n",
|
||||
"acc1 = accuracy_score(y_test, rf_pred1) * 100\n",
|
||||
"accuracies1.append(acc1)\n",
|
||||
"# format with color coded if acc1 > 90\n",
|
||||
"acc1 = f\"\\033[92m{acc1:.2f}\\033[00m\" if acc1 > 90 else f\"{acc1:.2f}\"\n",
|
||||
"print(\"Random Forest Accuracy for sensor 1:\", acc1)\n",
|
||||
"rf_model2 = RandomForestClassifier()\n",
|
||||
"rf_model2.fit(x_train2, y_train)\n",
|
||||
"rf_pred2 = rf_model2.predict(x_test2)\n",
|
||||
"rf_model.fit(x_train2, y_train)\n",
|
||||
"rf_pred2 = rf_model.predict(x_test2)\n",
|
||||
"acc2 = accuracy_score(y_test, rf_pred2) * 100\n",
|
||||
"accuracies2.append(acc2)\n",
|
||||
"# format with color coded if acc2 > 90\n",
|
||||
@@ -644,17 +618,16 @@
|
||||
"# print(y_test)\n",
|
||||
"\n",
|
||||
"# 2. Bagged Trees\n",
|
||||
"bagged_model1 = BaggingClassifier(estimator=DecisionTreeClassifier(), n_estimators=10)\n",
|
||||
"bagged_model1.fit(x_train1, y_train)\n",
|
||||
"bagged_pred1 = bagged_model1.predict(x_test1)\n",
|
||||
"bagged_model = BaggingClassifier(estimator=DecisionTreeClassifier(), n_estimators=10)\n",
|
||||
"bagged_model.fit(x_train1, y_train)\n",
|
||||
"bagged_pred1 = bagged_model.predict(x_test1)\n",
|
||||
"acc1 = accuracy_score(y_test, bagged_pred1) * 100\n",
|
||||
"accuracies1.append(acc1)\n",
|
||||
"# format with color coded if acc1 > 90\n",
|
||||
"acc1 = f\"\\033[92m{acc1:.2f}\\033[00m\" if acc1 > 90 else f\"{acc1:.2f}\"\n",
|
||||
"print(\"Bagged Trees Accuracy for sensor 1:\", acc1)\n",
|
||||
"bagged_model2 = BaggingClassifier(estimator=DecisionTreeClassifier(), n_estimators=10)\n",
|
||||
"bagged_model2.fit(x_train2, y_train)\n",
|
||||
"bagged_pred2 = bagged_model2.predict(x_test2)\n",
|
||||
"bagged_model.fit(x_train2, y_train)\n",
|
||||
"bagged_pred2 = bagged_model.predict(x_test2)\n",
|
||||
"acc2 = accuracy_score(y_test, bagged_pred2) * 100\n",
|
||||
"accuracies2.append(acc2)\n",
|
||||
"# format with color coded if acc2 > 90\n",
|
||||
@@ -670,9 +643,8 @@
|
||||
"# format with color coded if acc1 > 90\n",
|
||||
"acc1 = f\"\\033[92m{acc1:.2f}\\033[00m\" if acc1 > 90 else f\"{acc1:.2f}\"\n",
|
||||
"print(\"Decision Tree Accuracy for sensor 1:\", acc1)\n",
|
||||
"dt_model2 = DecisionTreeClassifier()\n",
|
||||
"dt_model2.fit(x_train2, y_train)\n",
|
||||
"dt_pred2 = dt_model2.predict(x_test2)\n",
|
||||
"dt_model.fit(x_train2, y_train)\n",
|
||||
"dt_pred2 = dt_model.predict(x_test2)\n",
|
||||
"acc2 = accuracy_score(y_test, dt_pred2) * 100\n",
|
||||
"accuracies2.append(acc2)\n",
|
||||
"# format with color coded if acc2 > 90\n",
|
||||
@@ -688,9 +660,8 @@
|
||||
"# format with color coded if acc1 > 90\n",
|
||||
"acc1 = f\"\\033[92m{acc1:.2f}\\033[00m\" if acc1 > 90 else f\"{acc1:.2f}\"\n",
|
||||
"print(\"KNeighbors Accuracy for sensor 1:\", acc1)\n",
|
||||
"knn_model2 = KNeighborsClassifier()\n",
|
||||
"knn_model2.fit(x_train2, y_train)\n",
|
||||
"knn_pred2 = knn_model2.predict(x_test2)\n",
|
||||
"knn_model.fit(x_train2, y_train)\n",
|
||||
"knn_pred2 = knn_model.predict(x_test2)\n",
|
||||
"acc2 = accuracy_score(y_test, knn_pred2) * 100\n",
|
||||
"accuracies2.append(acc2)\n",
|
||||
"# format with color coded if acc2 > 90\n",
|
||||
@@ -706,9 +677,8 @@
|
||||
"# format with color coded if acc1 > 90\n",
|
||||
"acc1 = f\"\\033[92m{acc1:.2f}\\033[00m\" if acc1 > 90 else f\"{acc1:.2f}\"\n",
|
||||
"print(\"Linear Discriminant Analysis Accuracy for sensor 1:\", acc1)\n",
|
||||
"lda_model2 = LinearDiscriminantAnalysis()\n",
|
||||
"lda_model2.fit(x_train2, y_train)\n",
|
||||
"lda_pred2 = lda_model2.predict(x_test2)\n",
|
||||
"lda_model.fit(x_train2, y_train)\n",
|
||||
"lda_pred2 = lda_model.predict(x_test2)\n",
|
||||
"acc2 = accuracy_score(y_test, lda_pred2) * 100\n",
|
||||
"accuracies2.append(acc2)\n",
|
||||
"# format with color coded if acc2 > 90\n",
|
||||
@@ -724,9 +694,8 @@
|
||||
"# format with color coded if acc1 > 90\n",
|
||||
"acc1 = f\"\\033[92m{acc1:.2f}\\033[00m\" if acc1 > 90 else f\"{acc1:.2f}\"\n",
|
||||
"print(\"Support Vector Machine Accuracy for sensor 1:\", acc1)\n",
|
||||
"svm_model2 = SVC()\n",
|
||||
"svm_model2.fit(x_train2, y_train)\n",
|
||||
"svm_pred2 = svm_model2.predict(x_test2)\n",
|
||||
"svm_model.fit(x_train2, y_train)\n",
|
||||
"svm_pred2 = svm_model.predict(x_test2)\n",
|
||||
"acc2 = accuracy_score(y_test, svm_pred2) * 100\n",
|
||||
"accuracies2.append(acc2)\n",
|
||||
"# format with color coded if acc2 > 90\n",
|
||||
@@ -742,9 +711,8 @@
|
||||
"# format with color coded if acc1 > 90\n",
|
||||
"acc1 = f\"\\033[92m{acc1:.2f}\\033[00m\" if acc1 > 90 else f\"{acc1:.2f}\"\n",
|
||||
"print(\"XGBoost Accuracy:\", acc1)\n",
|
||||
"xgboost_model2 = XGBClassifier()\n",
|
||||
"xgboost_model2.fit(x_train2, y_train)\n",
|
||||
"xgboost_pred2 = xgboost_model2.predict(x_test2)\n",
|
||||
"xgboost_model.fit(x_train2, y_train)\n",
|
||||
"xgboost_pred2 = xgboost_model.predict(x_test2)\n",
|
||||
"acc2 = accuracy_score(y_test, xgboost_pred2) * 100\n",
|
||||
"accuracies2.append(acc2)\n",
|
||||
"# format with color coded if acc2 > 90\n",
|
||||
@@ -821,75 +789,57 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from src.ml.model_selection import create_ready_data\n",
|
||||
"def spectograph(data_dir: str):\n",
|
||||
" # print(os.listdir(data_dir))\n",
|
||||
" for damage in os.listdir(data_dir):\n",
|
||||
" # print(damage)\n",
|
||||
" d = os.path.join(data_dir, damage)\n",
|
||||
" # print(d)\n",
|
||||
" for file in os.listdir(d):\n",
|
||||
" # print(file)\n",
|
||||
" f = os.path.join(d, file)\n",
|
||||
" print(f)\n",
|
||||
" # sensor1 = pd.read_csv(f, skiprows=1, sep=';')\n",
|
||||
" # sensor2 = pd.read_csv(f, skiprows=1, sep=';')\n",
|
||||
"\n",
|
||||
"X1b, y = create_ready_data('D:/thesis/data/converted/raw_B/sensor1')\n",
|
||||
"X2b, y = create_ready_data('D:/thesis/data/converted/raw_B/sensor2')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"y.shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from sklearn.metrics import accuracy_score, classification_report\n",
|
||||
"# 4. Validate on Dataset B\n",
|
||||
"y_pred_svm = svm_model.predict(X1b)\n",
|
||||
" # df1 = pd.DataFrame()\n",
|
||||
"\n",
|
||||
"# 5. Evaluate\n",
|
||||
"print(\"Accuracy on Dataset B:\", accuracy_score(y, y_pred_svm))\n",
|
||||
"print(classification_report(y, y_pred_svm))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from sklearn.metrics import accuracy_score, classification_report\n",
|
||||
"# 4. Validate on Dataset B\n",
|
||||
"y_pred = rf_model2.predict(X2b)\n",
|
||||
" # df1['s1'] = sensor1[sensor1.columns[-1]]\n",
|
||||
" # df1['s2'] = sensor2[sensor2.columns[-1]]\n",
|
||||
" # # Combined Plot for sensor 1 and sensor 2 from data1 file in which motor is operated at 800 rpm\n",
|
||||
"\n",
|
||||
"# 5. Evaluate\n",
|
||||
"print(\"Accuracy on Dataset B:\", accuracy_score(y, y_pred))\n",
|
||||
"print(classification_report(y, y_pred))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"y_predict = svm_model2.predict(X2b.iloc[[5312],:])\n",
|
||||
"print(y_predict)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"y[5312]"
|
||||
" # plt.plot(df1['s2'], label='sensor 2')\n",
|
||||
" # plt.plot(df1['s1'], label='sensor 1')\n",
|
||||
" # plt.xlabel(\"Number of samples\")\n",
|
||||
" # plt.ylabel(\"Amplitude\")\n",
|
||||
" # plt.title(\"Raw vibration signal\")\n",
|
||||
" # plt.legend()\n",
|
||||
" # plt.show()\n",
|
||||
"\n",
|
||||
" # from scipy import signal\n",
|
||||
" # from scipy.signal.windows import hann\n",
|
||||
"\n",
|
||||
" # vibration_data = df1['s1']\n",
|
||||
"\n",
|
||||
" # # Applying STFT\n",
|
||||
" # window_size = 1024\n",
|
||||
" # hop_size = 512\n",
|
||||
" # window = hann(window_size) # Creating a Hanning window\n",
|
||||
" # frequencies, times, Zxx = signal.stft(vibration_data, window=window, nperseg=window_size, noverlap=window_size - hop_size)\n",
|
||||
"\n",
|
||||
" # # Plotting the STFT Data\n",
|
||||
" # plt.pcolormesh(times, frequencies, np.abs(Zxx), shading='gouraud')\n",
|
||||
" # plt.title(f'STFT Magnitude for case 1 signal sensor 1 ')\n",
|
||||
" # plt.ylabel('Frequency [Hz]')\n",
|
||||
" # plt.xlabel('Time [sec]')\n",
|
||||
" # plt.show()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### Confusion Matrix"
|
||||
"## Test with Outside of Its Training Data"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -897,52 +847,7 @@
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import matplotlib.pyplot as plt\n",
|
||||
"from sklearn.metrics import confusion_matrix, ConfusionMatrixDisplay\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"cm = confusion_matrix(y, y_pred_svm) # -> ndarray\n",
|
||||
"\n",
|
||||
"# get the class labels\n",
|
||||
"labels = svm_model.classes_\n",
|
||||
"\n",
|
||||
"# Plot\n",
|
||||
"disp = ConfusionMatrixDisplay(confusion_matrix=cm, display_labels=labels)\n",
|
||||
"disp.plot(cmap=plt.cm.Blues) # You can change colormap\n",
|
||||
"plt.title(\"SVM Sensor1 CM Train w/ Dataset A Val w/ Dataset B\")\n",
|
||||
"plt.show()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"#### Self-test CM"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# 1. Predict sensor 1 on Dataset A\n",
|
||||
"y_train_pred = svm_model.predict(x_train1)\n",
|
||||
"\n",
|
||||
"# 2. Import confusion matrix tools\n",
|
||||
"from sklearn.metrics import confusion_matrix, ConfusionMatrixDisplay\n",
|
||||
"import matplotlib.pyplot as plt\n",
|
||||
"\n",
|
||||
"# 3. Create and plot confusion matrix\n",
|
||||
"cm_train = confusion_matrix(y_train, y_train_pred)\n",
|
||||
"labels = svm_model.classes_\n",
|
||||
"\n",
|
||||
"disp = ConfusionMatrixDisplay(confusion_matrix=cm_train, display_labels=labels)\n",
|
||||
"disp.plot(cmap=plt.cm.Blues)\n",
|
||||
"plt.title(\"Confusion Matrix: Train & Test on Dataset A\")\n",
|
||||
"plt.show()\n"
|
||||
]
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
|
||||
@@ -1,57 +0,0 @@
|
||||
import numpy as np
|
||||
import pandas as pd
|
||||
import os
|
||||
from sklearn.model_selection import train_test_split as sklearn_split
|
||||
|
||||
|
||||
def create_ready_data(
|
||||
stft_data_path: str,
|
||||
stratify: np.ndarray = None,
|
||||
) -> tuple:
|
||||
"""
|
||||
Create a stratified train-test split from STFT data.
|
||||
|
||||
Parameters:
|
||||
-----------
|
||||
stft_data_path : str
|
||||
Path to the directory containing STFT data files (e.g. 'data/converted/raw/sensor1')
|
||||
stratify : np.ndarray, optional
|
||||
Labels to use for stratified sampling
|
||||
|
||||
Returns:
|
||||
--------
|
||||
tuple
|
||||
(X_train, X_test, y_train, y_test) - Split datasets
|
||||
"""
|
||||
ready_data = []
|
||||
for file in os.listdir(stft_data_path):
|
||||
ready_data.append(pd.read_csv(os.path.join(stft_data_path, file)))
|
||||
|
||||
y_data = [i for i in range(len(ready_data))]
|
||||
|
||||
# Combine all dataframes in ready_data into a single dataframe
|
||||
if ready_data: # Check if the list is not empty
|
||||
# Use pandas concat function instead of iterative concatenation
|
||||
combined_data = pd.concat(ready_data, axis=0, ignore_index=True)
|
||||
|
||||
print(f"Type of combined data: {type(combined_data)}")
|
||||
print(f"Shape of combined data: {combined_data.shape}")
|
||||
else:
|
||||
print("No data available in ready_data list")
|
||||
combined_data = pd.DataFrame()
|
||||
|
||||
# Store the result in x1a for compatibility with subsequent code
|
||||
X = combined_data
|
||||
|
||||
for i in range(len(y_data)):
|
||||
y_data[i] = [y_data[i]] * ready_data[i].shape[0]
|
||||
y_data[i] = np.array(y_data[i])
|
||||
|
||||
if y_data:
|
||||
# Use numpy concatenate function instead of iterative concatenation
|
||||
y = np.concatenate(y_data, axis=0)
|
||||
else:
|
||||
print("No labels available in y_data list")
|
||||
y = np.array([])
|
||||
|
||||
return X, y
|
||||
@@ -2,7 +2,6 @@ import pandas as pd
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import numpy as np
|
||||
from colorama import Fore, Style, init
|
||||
from typing import TypedDict, Dict, List
|
||||
from joblib import load
|
||||
@@ -226,56 +225,25 @@ class DataProcessor:
|
||||
"""
|
||||
idx = self._create_vector_column_index()
|
||||
# if overwrite:
|
||||
for i in range(len(self.data)): # damage(s)
|
||||
for j in range(len(self.data[i])): # col(s)
|
||||
for i in range(len(self.data)):
|
||||
for j in range(len(self.data[i])):
|
||||
# Get the appropriate indices for slicing from idx
|
||||
indices = idx[j]
|
||||
|
||||
# Get the current DataFrame
|
||||
df = self.data[i][j]
|
||||
|
||||
# Keep the 'Time' column and select only specifid 'Real' colmns
|
||||
# First, we add 1 to all indices to acount for 'Time' being at positiion 0
|
||||
# Keep the 'Time' column and select only specified 'Real' columns
|
||||
# First, we add 1 to all indices to account for 'Time' being at position 0
|
||||
real_indices = [index + 1 for index in indices]
|
||||
|
||||
# Create list with Time column index (0) and the adjustedd Real indices
|
||||
# Create list with Time column index (0) and the adjusted Real indices
|
||||
all_indices = [0] + [real_indices[0]] + [real_indices[-1]]
|
||||
|
||||
# Apply the slicing
|
||||
self.data[i][j] = df.iloc[:, all_indices]
|
||||
# TODO: if !overwrite:
|
||||
|
||||
def export_to_csv(self, output_dir: str, file_prefix: str = "DAMAGE"):
|
||||
"""
|
||||
Export the processed data to CSV files in the required folder structure.
|
||||
|
||||
:param output_dir: Directory to save the CSV files.
|
||||
:param file_prefix: Prefix for the output filenames.
|
||||
"""
|
||||
for group_idx, group in enumerate(self.data, start=1):
|
||||
group_folder = os.path.join(output_dir, f"{file_prefix}_{group_idx}")
|
||||
os.makedirs(group_folder, exist_ok=True)
|
||||
for test_idx, df in enumerate(group, start=1):
|
||||
# Ensure columns are named uniquely if duplicated
|
||||
df = df.copy()
|
||||
df.columns = ["Time", "Real_0", "Real_1"] # Rename
|
||||
|
||||
# Export first Real column
|
||||
out1 = os.path.join(
|
||||
group_folder, f"{file_prefix}_{group_idx}_TEST{test_idx}_01.csv"
|
||||
)
|
||||
df[["Time", "Real_0"]].rename(columns={"Real_0": "Real"}).to_csv(
|
||||
out1, index=False
|
||||
)
|
||||
|
||||
# Export last Real column
|
||||
out2 = os.path.join(
|
||||
group_folder, f"{file_prefix}_{group_idx}_TEST{test_idx}_02.csv"
|
||||
)
|
||||
df[["Time", "Real_1"]].rename(columns={"Real_1": "Real"}).to_csv(
|
||||
out2, index=False
|
||||
)
|
||||
|
||||
|
||||
def create_damage_files(base_path, output_base, prefix):
|
||||
# Initialize colorama
|
||||
|
||||
@@ -4,22 +4,5 @@ from joblib import dump, load
|
||||
# a = generate_damage_files_index(
|
||||
# num_damage=6, file_index_start=1, col=5, base_path="D:/thesis/data/dataset_A"
|
||||
# )
|
||||
|
||||
b = generate_damage_files_index(
|
||||
num_damage=6,
|
||||
file_index_start=1,
|
||||
col=5,
|
||||
base_path="D:/thesis/data/dataset_B",
|
||||
prefix="zzzBD",
|
||||
)
|
||||
# data_A = DataProcessor(file_index=a)
|
||||
# # data.create_vector_column(overwrite=True)
|
||||
# data_A.create_limited_sensor_vector_column(overwrite=True)
|
||||
# data_A.export_to_csv("D:/thesis/data/converted/raw")
|
||||
|
||||
data_B = DataProcessor(file_index=b)
|
||||
# data.create_vector_column(overwrite=True)
|
||||
data_B.create_limited_sensor_vector_column(overwrite=True)
|
||||
data_B.export_to_csv("D:/thesis/data/converted/raw_B")
|
||||
# a = load("D:/cache.joblib")
|
||||
# breakpoint()
|
||||
# dump(DataProcessor(file_index=a), "D:/cache.joblib")
|
||||
a = load("D:/cache.joblib")
|
||||
|
||||
41
latex/appendix/important/abdeljaber2017.tex
Normal file
41
latex/appendix/important/abdeljaber2017.tex
Normal file
@@ -0,0 +1,41 @@
|
||||
2 %Nomor
|
||||
|
||||
%for mult rows
|
||||
|
||||
& %Judul Jurnal
|
||||
Real-time vibration-based structural damage detection using one-dimensional convolutional neural networks \href{https://doi.org/10.1016/j.jsv.2016.10.043}{10.1016/j.jsv.
|
||||
2016.10.043}
|
||||
|
||||
%for mult rows
|
||||
|
||||
% & %Author
|
||||
% % Satish B Satpal; Yogesh Khandare; Anirban Guha; Sauvik Banerjee
|
||||
|
||||
% %for mult rows
|
||||
|
||||
% & %Nama Jurnal
|
||||
% International Journal of Advanced Structural Engineering (IJASE)
|
||||
|
||||
% %for mult rows
|
||||
|
||||
% & %Sumber
|
||||
% \href{http://dx.doi.org/10.1186/2008-6695-5-2}{ResearchGate}
|
||||
|
||||
% %for mult rows
|
||||
|
||||
% & %Tahun
|
||||
% 2020
|
||||
|
||||
% %for mult rows
|
||||
|
||||
& %Tujuan penelitian
|
||||
Mengidentifikasi lokasi kerusakan struktur secara \textit{real-time} dengan memproses sinyal getaran mentah yang diambil dari jaringan-jaringan akselerometer pada setiap titik tanpa proses tambahan atau ekstraksi fitur.
|
||||
|
||||
& %Kesimpulan
|
||||
% Studi ini menilai kemampuan mesin vektor pendukung untuk memprediksi intensitas kerusakan dan lokasi pada balok kantilever. Meskipun berhasil memprediksi kerusakan dengan sedikit kesalahan, tingkat kebisingan dan lokasi kerusakan memengaruhi keakuratan. Tingkat kebisingan yang tinggi mempengaruhi kinerja secara signifikan, terutama pada intensitas kerusakan yang lebih rendah.
|
||||
& % Gap Research
|
||||
\begin{enumerate}
|
||||
\item Riset ini hanya dilakukan dengan \textit{full-grid array} akselerometer yang diletakkan pada setiap \textit{node} kerusakan, sehingga memerlukan banyak perangkat akselerometer.
|
||||
|
||||
\item Tidak ada komparasi performa efisiensi dan akurasi dengan algoritma pembelajaran mesin lain yang lebih populer sebelumnya.
|
||||
\end{enumerate}
|
||||
68
latex/appendix/important/van2020.tex
Normal file
68
latex/appendix/important/van2020.tex
Normal file
@@ -0,0 +1,68 @@
|
||||
1
|
||||
|
||||
%for mult rows
|
||||
|
||||
&
|
||||
Statistical Feature Extraction in Machine Fault Detection using Vibration Signal (\href{https://doi.org/10.1109/ICTC49870.2020.9289285}{10.1109/ICTC49870.
|
||||
2020.9289285})
|
||||
%for mult rows
|
||||
|
||||
% &
|
||||
% Donghui Xu; Xiang Xu; Michael C. Forde; Antonio Caballero
|
||||
|
||||
%for mult rows
|
||||
|
||||
% &
|
||||
% Construction and Building Materials
|
||||
|
||||
% %for mult rows
|
||||
|
||||
% &
|
||||
% \href{https://doi.org/10.1016/j.conbuildmat.2023.132596}{ScienceDirect}
|
||||
|
||||
% %for mult rows
|
||||
|
||||
% &
|
||||
% 2023
|
||||
|
||||
% %for mult rows
|
||||
|
||||
&
|
||||
\begin{enumerate}[series=enum]
|
||||
\item Menginvestigasi cara mengklasifikasi kondisi \textit{gearbox} normal dan rusak menggunakan sinyal getaran berbasis pada kombinasi antara analisis statistik dan FFT dengan algoritma pembelajaran mesin (ANN, Logistic Regression, dan SVM)
|
||||
|
||||
\item Mengurangi waktu latih dan kompleksitas kalkulasi dengan analisis statistik sebagai input data meliputi sembilan fitur: mean, median, min, max, kurtosis, \textit{skewness}, \textit{standard deviation}, dan \textit{range}.
|
||||
\end{enumerate}
|
||||
|
||||
&
|
||||
\begin{enumerate}[series=enum2]
|
||||
\item Nilai \textit{maximum} dan kurtosis adalah fitur yang paling signifikan untuk mengklasifikasi kelas label pembelajaran mesin.
|
||||
|
||||
\item ANN meraih akurasi 100\% pada input FFT penuh dan analisis statistik, sedangkan Regresi Logistik (LR) dan SVM meraih akurasi 100\% dengan input FFT penuh namun hanya mendapat akurasi 91\% dengan input analisis statistik
|
||||
\end{enumerate}
|
||||
|
||||
&
|
||||
\begin{enumerate}
|
||||
\item Lorem
|
||||
\item Ipsum
|
||||
\end{enumerate}
|
||||
|
||||
%-------------page break----------------
|
||||
% \\
|
||||
% &
|
||||
% &
|
||||
% &
|
||||
% &
|
||||
% &
|
||||
% &
|
||||
% &
|
||||
% \begin{enumerate}[resume=enum]
|
||||
% \item Menyajikan berbagai perkembangan penelitian, mendiskusikan dan membandingkannya kelebihan dan kekurangannya
|
||||
% \item Meringkas kesesuaian berbagai metode pembelajaran mesin untuk masalah SHM yang berbeda
|
||||
% \item Terakhir, tren masa depan
|
||||
% \end{enumerate}
|
||||
% &
|
||||
% \begin{enumerate}[resume=enum2]
|
||||
% \item SVM dan hutan acak kurang mendapat perhatian dibandingkan dengan jaringan saraf. Ini digunakan untuk klasifikasi kerusakan. Namun, pemrosesan awal data jauh lebih rumit.
|
||||
|
||||
% \end{enumerate}
|
||||
509
latex/appendix/summary_related_paper.tex
Normal file
509
latex/appendix/summary_related_paper.tex
Normal file
@@ -0,0 +1,509 @@
|
||||
\documentclass[12pt,a4paper]{report}
|
||||
\usepackage{hyperref}
|
||||
\usepackage[top=1cm,right=3cm,bottom=1cm,left=3cm]{geometry}
|
||||
\usepackage{multirow}
|
||||
\usepackage{array}
|
||||
% \usepackage{makecell}
|
||||
\usepackage{pdflscape}
|
||||
\usepackage{longtable,booktabs}
|
||||
\usepackage{colortbl,xcolor}
|
||||
\usepackage{enumitem}
|
||||
\usepackage{pdfpages}
|
||||
\usepackage{caption}
|
||||
\usepackage[bahasa]{babel}
|
||||
\usepackage{xpatch,csquotes}
|
||||
\usepackage[backend=biber]{biblatex}
|
||||
\addbibresource{export.bib}
|
||||
\DeclareSourcemap{
|
||||
\maps[datatype = bibtex]{
|
||||
\map{
|
||||
\step[fieldsource = abstract,
|
||||
match = \regexp{([^\\])\%},
|
||||
replace = \regexp{\$1\\\%}]
|
||||
}
|
||||
}
|
||||
}
|
||||
% \usepackage{tablefootnote}
|
||||
% \usepackage{showframe}
|
||||
\definecolor{Gray}{gray}{0.95}
|
||||
\newcolumntype{a}{>{\columncolor{Gray}}p}
|
||||
\renewcommand{\thefootnote}{\textit{\alph{footnote}}}
|
||||
% \newcolumntype{b}{>{\raggedright\arraybackslash}p}
|
||||
|
||||
\title{Tugas 2 \\ Metode Penelitian}
|
||||
\author{Rifqi Damar Panuluh \\ 20210110224}
|
||||
|
||||
\begin{document}
|
||||
\maketitle
|
||||
\begin{landscape}
|
||||
% Table generated by Excel2LaTeX from sheet 'Sheet1'
|
||||
% \begin{table}[h]
|
||||
\centering
|
||||
\begin{longtable}{
|
||||
>{\raggedleft\arraybackslash}p{0.02\linewidth} %1
|
||||
>{\raggedright\arraybackslash}a{0.1\linewidth} %2
|
||||
% >{\raggedright\arraybackslash}p{0.1\linewidth} %3
|
||||
% >{\raggedright\arraybackslash}a{0.075\linewidth} %4
|
||||
% p{0.065\linewidth} %5
|
||||
% >{\raggedleft\arraybackslash}p{0.05\linewidth} %6
|
||||
>{\raggedright\arraybackslash}p{0.25\linewidth} %7
|
||||
>{\raggedright\arraybackslash}a{0.25\linewidth} %8
|
||||
>{\raggedright\arraybackslash}p{0.25\linewidth} %9
|
||||
}
|
||||
|
||||
\caption{Tinjauan pustaka, topik: pemanfaatan data getaran untuk monitor kesehatan struktur jembatan}
|
||||
\label{tab:my_label}
|
||||
\\
|
||||
\toprule
|
||||
\toprule
|
||||
\rowcolor{white}
|
||||
No. %1
|
||||
&
|
||||
Judul %2
|
||||
% &
|
||||
% Nama Penulis %3
|
||||
% &
|
||||
% Nama Jurnal %4
|
||||
% &
|
||||
% Sumber %5
|
||||
% &
|
||||
% Tahun %6
|
||||
&
|
||||
Tujuan Penelitian %7
|
||||
&
|
||||
Kesimpulan %8
|
||||
&
|
||||
Gap Research %9
|
||||
|
||||
\\\midrule
|
||||
\endfirsthead
|
||||
\toprule
|
||||
\rowcolor{white}
|
||||
No. %1
|
||||
&
|
||||
Judul %2
|
||||
% &
|
||||
% Nama Penulis %3
|
||||
% &
|
||||
% Nama Jurnal %4
|
||||
% &
|
||||
% Sumber %5
|
||||
% &
|
||||
% Tahun %6
|
||||
&
|
||||
Tujuan Penelitian %7
|
||||
&
|
||||
Kesimpulan %8
&
Gap Research %9
|
||||
|
||||
\\\midrule
|
||||
\endhead
|
||||
\midrule
|
||||
\multicolumn{5}{r}{\textit{berlanjut di halaman berikutnya}}
|
||||
\endfoot
|
||||
\bottomrule
|
||||
\bottomrule
|
||||
\endlastfoot
|
||||
|
||||
%-----1
|
||||
\input{important/van2020}
|
||||
\\
|
||||
%-----2
|
||||
\input{important/abdeljaber2017}
|
||||
\\
|
||||
%------3
|
||||
\\
|
||||
3
|
||||
|
||||
& %Judul Jurnal
|
||||
Real-time nondestructive structural health monitoring using support vector machines and wavelets (Ahmet Bulut; Ambuj K. Singh; Peter Shin; Tony Fountain; Hector Jasso; Linjun Yan; Ahmed Elgamal)
|
||||
|
||||
%for mult rows
|
||||
|
||||
% & %Author
|
||||
% Ahmet Bulut; Ambuj K. Singh; Peter Shin; Tony Fountain; Hector Jasso; Linjun Yan; Ahmed Elgamal
|
||||
|
||||
%for mult rows
|
||||
|
||||
% & %Nama Jurnal
|
||||
% Case Studies in Construction Materials 13 (2020) e00406
|
||||
|
||||
% %for mult rows
|
||||
|
||||
% & %Sumber
|
||||
% SPIE
|
||||
|
||||
% %for mult rows
|
||||
|
||||
% & %Tahun
|
||||
% 2005
|
||||
|
||||
%for mult rows
|
||||
|
||||
& %Tujuan penelitian
|
||||
Eksplorasi efektivitas SVM dalam deteksi kerusakan; Validasi model SVM dengan data nyata jembatan
|
||||
|
||||
& %Kesimpulan
|
||||
\begin{enumerate} [series=enum]
|
||||
\item SVM menunjukkan akurasi tinggi dalam mengidentifikasi lokasi kerusakan
|
||||
\item Rekomendasi untuk penyetelan parameter SVM
|
||||
\end{enumerate}
|
||||
|
||||
|
||||
|
||||
|
||||
%-----------4
|
||||
\\
|
||||
4
|
||||
|
||||
& %Judul Jurnal
|
||||
A novel approach of Structural Health Monitoring by the application of FFT and wavelet transform using an index of frequency dispersion (Fragkiskos P. Pentaris; John Stonham; John P. Makris)
|
||||
|
||||
%for mult rows
|
||||
|
||||
% & %Author
|
||||
% Fragkiskos P. Pentaris; John Stonham; John P. Makris
|
||||
|
||||
%for mult rows
|
||||
|
||||
% & %Nama Jurnal
|
||||
% International Journal of Geology
|
||||
|
||||
% %for mult rows
|
||||
|
||||
% & %Sumber
|
||||
% Research Gate
|
||||
|
||||
% %for mult rows
|
||||
|
||||
% & %Tahun
|
||||
% 2013
|
||||
|
||||
%for mult rows
|
||||
|
||||
& %Tujuan penelitian
|
||||
\begin{enumerate}
|
||||
\item Memeriksa peran FFT dalam pemrosesan awal data getaran
|
||||
\item Menilai dampak FFT terhadap keakuratan deteksi kerusakan
|
||||
\end{enumerate}
|
||||
|
||||
& %Kesimpulan
|
||||
\begin{enumerate} [series=enum]
|
||||
\item FFT meningkatkan rasio \textit{signal-to-noise} dan meningkatkan deteksi kerusakan.
|
||||
\item Menyarankan integrasi dengan algoritme lain untuk meningkatkan akurasi.
|
||||
\end{enumerate}
|
||||
|
||||
\\ %-------------page break----------------
|
||||
|
||||
|
||||
|
||||
|
||||
%-----------4
|
||||
\\
|
||||
5
|
||||
|
||||
& %Judul Jurnal
|
||||
Review of Vibration-Based Structural Health Monitoring Using Deep Learning (Gyungmin Toh; Junhong Park)
|
||||
|
||||
%for mult rows
|
||||
|
||||
% & %Author
|
||||
% Gyungmin Toh;
|
||||
% Junhong Park
|
||||
|
||||
% %for mult rows
|
||||
|
||||
% & %Nama Jurnal
|
||||
% Applied Sciences
|
||||
|
||||
% %for mult rows
|
||||
|
||||
% & %Sumber
|
||||
% MDPI
|
||||
|
||||
% %for mult rows
|
||||
|
||||
% & %Tahun
|
||||
% 2020
|
||||
|
||||
%for mult rows
|
||||
|
||||
& %Tujuan penelitian
|
||||
\begin{enumerate}
|
||||
\item ringkasan studi penerapan algoritma pembelajaran mesin untuk kesalahan pemantauan (\textit{monitoring}) menggunakan faktor getaran untuk mengkategorikan penelitian.
|
||||
\item Menyediakan interpretasi singkat tentang jaringan saraf dalam untuk pengaplikasian lebih lanjut dalam analisis getaran struktural.
|
||||
\end{enumerate}
|
||||
|
||||
& %Kesimpulan
|
||||
\begin{enumerate} [series=enum]
|
||||
\item Pembelajaran mendalam (\textit{deep learning}) memiliki keunggulan mampu melakukan pemantauan kesehatan pada struktur yang kompleks dengan akurasi tinggi.
|
||||
\end{enumerate}
|
||||
%-------------page break----------------
|
||||
|
||||
|
||||
|
||||
|
||||
%-----------4
|
||||
\\
|
||||
6
|
||||
|
||||
& %Judul Jurnal
|
||||
A deep learning approach to condition monitoring of cantilever beams via time-frequency extended signatures (Habil. Darian M. Onchis)
|
||||
|
||||
%for mult rows
|
||||
|
||||
% & %Author
|
||||
% Habil. Darian M. Onchis
|
||||
|
||||
% %for mult rows
|
||||
|
||||
% & %Nama Jurnal
|
||||
% Computers in Industry
|
||||
|
||||
% %for mult rows
|
||||
|
||||
% & %Sumber
|
||||
% Science Direct
|
||||
|
||||
% %for mult rows
|
||||
|
||||
% & %Tahun
|
||||
% 2019
|
||||
|
||||
%for mult rows
|
||||
|
||||
& %Tujuan penelitian
|
||||
\begin{enumerate}
|
||||
\item ringkasan studi penerapan algoritma pembelajaran mesin untuk kesalahan pemantauan (\textit{monitoring}) menggunakan faktor getaran untuk mengkategorikan penelitian.
|
||||
\item Menyediakan interpretasi singkat tentang jaringan saraf dalam untuk pengaplikasian lebih lanjut dalam analisis getaran struktural.
|
||||
\end{enumerate}
|
||||
|
||||
& %Kesimpulan
|
||||
\begin{enumerate} [series=enum]
|
||||
\item Pembelajaran mendalam (\textit{deep learning}) memiliki keunggulan mampu melakukan pemantauan kesehatan pada struktur yang kompleks dengan akurasi tinggi.
|
||||
\end{enumerate}
|
||||
|
||||
\\ %-------------page break----------------
|
||||
|
||||
|
||||
% %------------5
|
||||
% 5
|
||||
|
||||
% & %Judul Jurnal
|
||||
% Advances and development trends in eco-friendly pavements
|
||||
|
||||
% %for mult rows
|
||||
|
||||
% & %Author
|
||||
% Aimin Sha, Zhuangzhuang Liu, Wei Jiang, Lin Qi, Liqun Hu, Wenxiu Jiao ,Diego Maria Barbieri
|
||||
|
||||
% %for mult rows
|
||||
|
||||
% & %Nama Jurnal
|
||||
% Journal of Road Engineering 1 (2021)
|
||||
|
||||
% %for mult rows
|
||||
|
||||
% & %Sumber
|
||||
% ScienceDirect
|
||||
|
||||
% %for mult rows
|
||||
|
||||
% & %Tahun
|
||||
% 2021
|
||||
|
||||
% %for mult rows
|
||||
|
||||
% & %Tujuan penelitian
|
||||
% Mengembangkan solusi teknis untuk mengatasi tantangan yang terkait dengan penciptaan infrastruktur hijau dan berkelanjutan, misalnya, pengurangan dampak lingkungan, peningkatan keselamatan lalu lintas, dan efisiensi transportasi, dll.\cite{Sha2021}
|
||||
% &
|
||||
% \begin{enumerate} [series=enum]
|
||||
% \item Temuan penelitian terbaru terkait jalan ramah lingkungan
|
||||
% trotoar diringkas dan dibahas sesuai dengan enam kunci yang berbeda
|
||||
% karakteristik: permeabel, pengurangan kebisingan, luminescence diri, knalpot
|
||||
% dekomposisi, penyerapan panas rendah serta \textit{anti-icing} / \textit{de-icing}.\cite{Sha2021}
|
||||
% \end{enumerate}
|
||||
% \\
|
||||
% & %Judul Jurnal
|
||||
% Advances and development trends in eco-friendly pavements
|
||||
|
||||
% %for mult rows
|
||||
|
||||
% & %Author
|
||||
% Aimin Sha, Zhuangzhuang Liu, Wei Jiang, Lin Qi, Liqun Hu, Wenxiu Jiao ,Diego Maria Barbieri
|
||||
|
||||
% %for mult rows
|
||||
|
||||
% & %Nama Jurnal
|
||||
% Journal of Road Engineering 1 (2021)
|
||||
|
||||
% %for mult rows
|
||||
|
||||
% & %Sumber
|
||||
% ScienceDirect
|
||||
|
||||
% %for mult rows
|
||||
|
||||
% & %Tahun
|
||||
% 2021
|
||||
|
||||
% %for mult rows
|
||||
|
||||
% & %Tujuan penelitian
|
||||
% Mengembangkan solusi teknis untuk mengatasi tantangan yang terkait dengan penciptaan infrastruktur hijau dan berkelanjutan, misalnya, pengurangan dampak lingkungan, peningkatan keselamatan lalu lintas, dan efisiensi transportasi, dll.\cite{Sha2021}
|
||||
% &
|
||||
% \begin{enumerate}[resume=enum]
|
||||
% \item Teknologi ini dapat memecahkan beberapa tantangan utama yang terkait dengan konstruksi jalan dan lalu lintas (misalnya, kebisingan, efek pulau panas, dan pembangkitan polusi). Sebagian besar solusi saat ini hanya tersedia menampilkan satu fungsi ramah lingkungan pada satu waktu.\cite{Sha2021}
|
||||
% \end{enumerate}
|
||||
|
||||
% %-----------5
|
||||
% \\
|
||||
% 5
|
||||
|
||||
% & %Judul Jurnal
|
||||
% Micromobility injury events: Motor vehicle crashes and other transportation systems factors
|
||||
|
||||
% %for mult rows
|
||||
|
||||
% & %Author
|
||||
% Kevin Fang
|
||||
|
||||
% %for mult rows
|
||||
|
||||
% & %Nama Jurnal
|
||||
% Transportation Research Interdisciplinary Perspectives 14 (2022) 100574
|
||||
|
||||
% %for mult rows
|
||||
|
||||
% & %Sumber
|
||||
% ScienceDirect
|
||||
|
||||
% %for mult rows
|
||||
|
||||
% & %Tahun
|
||||
% 2022
|
||||
|
||||
% %for mult rows
|
||||
|
||||
% & %Tujuan penelitian
|
||||
% Menginformasikan transportasi strategi kebijakan untuk mencoba dan meningkatkan kinerja keselamatan, Dengan cara mengeksplorasi keadaan di mana cedera pengendara mikromobilitas mengalami cederanya, dengan fokus pada faktor-faktor yang berkaitan dengan sistem transportasi.\cite{Fang2022}
|
||||
% &
|
||||
% \begin{enumerate} [series=enum]
|
||||
% \item Kecelakaan kendaraan bermotor secara mengejutkan menjulang sebagai sesuatu yang kemungkinan adalah faktor umum dalam cedera mikromobilitas. Masalah perkerasan, konflik
|
||||
% dengan pengguna non-otomatis, dan medan juga muncul sebagai faktor cedera yang terukur.\cite{Fang2022}
|
||||
% \end{enumerate}
|
||||
% \\
|
||||
% & %Judul Jurnal
|
||||
% Micromobility injury events: Motor vehicle crashes and other transportation systems factors
|
||||
|
||||
% %for mult rows
|
||||
|
||||
% & %Author
|
||||
% Kevin Fang
|
||||
|
||||
% %for mult rows
|
||||
|
||||
% & %Nama Jurnal
|
||||
% Transportation Research Interdisciplinary Perspectives 14 (2022) 100574
|
||||
|
||||
% %for mult rows
|
||||
|
||||
% & %Sumber
|
||||
% ScienceDirect
|
||||
|
||||
% %for mult rows
|
||||
|
||||
% & %Tahun
|
||||
% 2022
|
||||
|
||||
% %for mult rows
|
||||
|
||||
% & %Tujuan penelitian
|
||||
% Menginformasikan transportasi strategi kebijakan untuk mencoba dan meningkatkan kinerja keselamatan, Dengan cara mengeksplorasi keadaan di mana cedera pengendara mikromobilitas mengalami cederanya, dengan fokus pada faktor-faktor yang berkaitan dengan sistem transportasi.\cite{Fang2022}
|
||||
% &
|
||||
% \begin{enumerate} [resume=enum]
|
||||
% \item Di antara faktor-faktor yang berhubungan dengan transportasi, analisis regresi
|
||||
% menunjukkan bahwa terluka dalam kecelakaan kendaraan bermotor atau di medan berbukit
|
||||
% sesuai dengan kemungkinan yang lebih besar dari rawat inap dan cedera kepala.\cite{Fang2022}
|
||||
% \end{enumerate}
|
||||
% \\
|
||||
% & %Judul Jurnal
|
||||
% Micromobility injury events: Motor vehicle crashes and other transportation systems factors
|
||||
|
||||
% %for mult rows
|
||||
|
||||
% & %Author
|
||||
% Kevin Fang
|
||||
|
||||
% %for mult rows
|
||||
|
||||
% & %Nama Jurnal
|
||||
% Transportation Research Interdisciplinary Perspectives 14 (2022) 100574
|
||||
|
||||
% %for mult rows
|
||||
|
||||
% & %Sumber
|
||||
% ScienceDirect
|
||||
|
||||
% %for mult rows
|
||||
|
||||
% & %Tahun
|
||||
% 2022
|
||||
|
||||
% %for mult rows
|
||||
|
||||
% & %Tujuan penelitian
|
||||
% Menginformasikan transportasi strategi kebijakan untuk mencoba dan meningkatkan kinerja keselamatan, Dengan cara mengeksplorasi keadaan di mana cedera pengendara mikromobilitas mengalami cederanya, dengan fokus pada faktor-faktor yang berkaitan dengan sistem transportasi.\cite{Fang2022}
|
||||
% &
|
||||
% \begin{enumerate} [resume=enum]
|
||||
% \item Mitigasi yang berhasil yang memaksimalkan kinerja mode keselamatan mikromobilitas dapat membantu menarik dan mempertahankan pengguna dan menjaga kepercayaan dari pembuat kebijakan yang peduli keselamatan.\cite{Fang2022}
|
||||
% \end{enumerate}
|
||||
% \end{tabular}
|
||||
\end{longtable}
|
||||
% \end{table}
|
||||
\end{landscape}
|
||||
\clearpage
|
||||
\pagenumbering{roman}
|
||||
\setcounter{page}{2}
|
||||
\thispagestyle{empty}
|
||||
\printbibliography
|
||||
|
||||
\clearpage
|
||||
\begin{titlepage}
|
||||
\
|
||||
\vfill
|
||||
\centering\noindent \Huge{LAMPIRAN}
|
||||
\vfill
|
||||
\
|
||||
\end{titlepage}
|
||||
|
||||
|
||||
% \clearpage
|
||||
% \thispagestyle{empty}
|
||||
% \centering
|
||||
% \frame{\includegraphics[page=1,scale=.7]{assets/1-s2.0-S2095756420300295-main.pdf}}
|
||||
% \captionof{figure}{Halaman pertama jurnal pertama}
|
||||
|
||||
% \clearpage
|
||||
% \thispagestyle{empty}
|
||||
% \centering
|
||||
% \frame{\includegraphics[page=1,scale=.7]{assets/1-s2.0-S2214509520300024-main.pdf}}
|
||||
% \captionof{figure}{Halaman pertama jurnal kedua}
|
||||
|
||||
% \clearpage
|
||||
% \thispagestyle{empty}
|
||||
% \centering
|
||||
% \frame{\includegraphics[page=1,scale=.7]{assets/1-s2.0-S2214509520300784-main.pdf}}
|
||||
% \captionof{figure}{Halaman pertama jurnal ketiga}
|
||||
|
||||
% \clearpage
|
||||
% \thispagestyle{empty}
|
||||
% \centering
|
||||
% \frame{\includegraphics[page=1,scale=.7]{assets/1-s2.0-S2097049821000044-main.pdf}}
|
||||
% \captionof{figure}{Halaman pertama jurnal keempat}
|
||||
|
||||
% \clearpage
|
||||
% \thispagestyle{empty}
|
||||
% \centering
|
||||
% \frame{\includegraphics[page=1,scale=.7]{assets/1-s2.0-S2590198222000379-main.pdf}}
|
||||
% \captionof{figure}{Halaman pertama jurnal kelima}
|
||||
\end{document}
|
||||
@@ -0,0 +1,25 @@
|
||||
\chapter{PENDAHULUAN}
|
||||
|
||||
\section{Latar Belakang}
|
||||
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nunc consequat lectus dolor, a commodo odio suscipit nec. Aliquam posuere elit eget tellus dapibus, auctor ornare mi porttitor. Donec auctor aliquet nisl, quis convallis ligula rutrum id. Duis tortor ipsum, scelerisque vestibulum viverra eu, maximus vel mi. Nullam volutpat nunc et varius tempor. Vivamus convallis mi eros, aliquam semper dui tincidunt a. Morbi nunc dui, accumsan ac arcu nec, condimentum efficitur mauris. Etiam sed mauris semper, volutpat justo eu, placerat mauris. Suspendisse at erat eu arcu gravida mattis et id nunc. Aliquam malesuada magna odio, ac dictum erat vestibulum a. Mauris vel nisi sit amet elit tempor bibendum sit amet a velit. Morbi dignissim facilisis placerat.\par
|
||||
|
||||
\begin{figure}
|
||||
\centering
|
||||
\includegraphics[width=0.5\linewidth]{frontmatter/img/slice1.jpg}
|
||||
\caption{Enter Caption}
|
||||
\label{fig:enter-label}
|
||||
\end{figure}
|
||||
|
||||
Pellentesque vel accumsan lorem, id vulputate metus. Nulla mollis orci ante, et euismod erat venenatis eget. Proin tempus lobortis feugiat. Fusce vitae sem quis lacus iaculis dignissim ut eget turpis. Vivamus ut nisl in enim porttitor fringilla vel et mauris. Mauris quis porttitor magna. Pellentesque molestie viverra arcu at tincidunt. Maecenas non elit arcu.\par
|
||||
|
||||
Etiam feugiat enim sit amet tortor interdum lobortis. Curabitur elementum faucibus sapien. Morbi eget facilisis lorem. In sed suscipit metus. Etiam porttitor, libero sit amet sodales hendrerit, libero dolor hendrerit nulla, sed convallis risus leo posuere metus. Cras gravida ac elit viverra ultrices. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia curae; Maecenas dictum urna elit, nec eleifend nulla mattis sit amet. Pellentesque suscipit metus vitae leo suscipit, a vehicula quam pretium. Sed eu est ut risus convallis hendrerit a vulputate justo. Nulla sollicitudin quam ut risus euismod, quis consequat dui mattis. Mauris id eros varius, pellentesque quam quis, venenatis tellus. Nulla vitae condimentum nisl. Vestibulum suscipit scelerisque dui, non posuere purus finibus nec. Nulla ultrices felis quis vestibulum porta. Suspendisse potenti.\par
|
||||
|
||||
Nam tempus tincidunt interdum. Pellentesque at ligula ac massa semper efficitur vitae non ante. Suspendisse potenti. Cras vitae interdum erat, nec facilisis urna. Nulla commodo porttitor tellus non posuere. Vestibulum tristique ut urna quis porttitor. Sed pellentesque lectus sit amet ultrices aliquam. Aliquam erat volutpat. Nam dictum eu erat a mollis. Donec eget nulla vel risus aliquet suscipit sed at libero.\par
|
||||
|
||||
|
||||
|
||||
Maecenas hendrerit pharetra bibendum. Donec ut tortor ac augue aliquam ullamcorper nec id eros. Quisque consectetur elementum ipsum vitae posuere. Sed ultricies ipsum nibh, vitae volutpat neque bibendum at. Morbi dictum metus eu bibendum malesuada. Nam scelerisque purus erat, id dictum nisl pretium vitae. Curabitur finibus commodo dui ac molestie. In sed sem ac dui dapibus ullamcorper. Aenean molestie nulla eu lorem maximus hendrerit. Vivamus viverra velit dolor, in vehicula eros facilisis at. Vivamus in rhoncus sem.
|
||||
\section{Lingkup Penelitian}
|
||||
\section{Tujuan Penelitian}
|
||||
\section{Manfaat Penelitian}
|
||||
% \subsubsection{Dolor}
|
||||
10
latex/chapters/en/02_literature_review/index.tex
Normal file
10
latex/chapters/en/02_literature_review/index.tex
Normal file
@@ -0,0 +1,10 @@
|
||||
\chapter{LITERATURE REVIEW AND THEORETICAL FOUNDATION}
|
||||
\section{Literature Review}
|
||||
\input{chapters/id/02_literature_review/literature_review/abdeljaber2017}
|
||||
|
||||
\section{Theoretical Foundation}
|
||||
\input{chapters/id/02_literature_review/theoritical_foundation/stft}
|
||||
\input{chapters/id/02_literature_review/theoritical_foundation/machine_learning}
|
||||
|
||||
\bigskip
|
||||
These theoretical foundations provide the methodological framework for implementing and evaluating the proposed damage localization system in this research. The combination of time-frequency analysis using STFT and classical machine learning classifiers enables an efficient and interpretable approach to structural health monitoring.
|
||||
@@ -0,0 +1,6 @@
|
||||
Traditional structural health monitoring methods often rely on hand-crafted features and manually tuned classifiers, which pose challenges in terms of generalization, reliability, and computational efficiency. As highlighted by \textcite{abdeljaber2017}, these approaches frequently require a trial-and-error process for feature and classifier selection, which not only reduces their robustness across structures but also hinders their deployment in real-time applications due to the computational load of the feature extraction phase.
|
||||
|
||||
\textcite{abdeljaber2017} introduced a CNN-based structural damage detection approach validated through a large-scale grandstand simulator at Qatar University. The structure, designed to replicate modern stadiums, was equipped with 30 accelerometers and subjected to controlled damage by loosening beam-to-girder bolts. Acceleration data, collected under band-limited white noise excitation and sampled at 1024 Hz, were segmented into 128-sample frames for training localized 1D CNNs—one per joint—creating a decentralized detection system. Across two experimental phases, involving both partial and full-structure monitoring, the method demonstrated high accuracy in damage localization, achieving a training classification error of just 0.54\%. While performance remained strong even under double-damage scenarios, some misclassifications occurred in symmetric or adjacent damage cases. Overall, the proposed method presents a highly efficient and accurate solution for real-time SHM applications.
|
||||
|
||||
In the context of this thesis, the dataset and experimental setup introduced by \textcite{abdeljaber2017} form the foundation for comparative analysis and algorithm testing. The authors have not only demonstrated the efficacy of a compact 1D CNN-based system for vibration-based structural damage detection, but also highlighted the value of using output-only acceleration data—a constraint shared in this thesis’s methodology. The decentralized design of their system, which allows each CNN to process only locally available data, is particularly aligned with this thesis's focus on efficient, sensor-level data analysis without requiring full-system synchronization. Furthermore, since the authors indicate plans to publicly release their dataset and source code, this thesis leverages that open data for applying alternative analysis methods such as support vector machines (SVM) or frequency domain feature extraction techniques, allowing a direct performance comparison between classical and deep learning-based SHM approaches. Thus, this work serves as both a benchmark reference and a data source in the development and evaluation of more accessible, lower-complexity alternatives for structural health monitoring systems.
|
||||
|
||||
@@ -0,0 +1,33 @@
|
||||
\subsection{Classification Algorithms}
|
||||
|
||||
This research evaluates five classical machine learning algorithms to perform the classification task of damage localization. Each algorithm has different strengths and limitations, and their performance is benchmarked to identify the most suitable one for the given dataset.
|
||||
|
||||
\subsubsection{Support Vector Machine (SVM)}
|
||||
|
||||
SVM is a supervised learning algorithm that seeks an optimal hyperplane that separates data into classes with maximum margin. SVM performs well in high-dimensional spaces and is robust to overfitting, especially in cases with a clear margin of separation.
|
||||
|
||||
SVM is appropriate for vibration signal classification due to its capability to handle nonlinear decision boundaries when equipped with kernel functions.
|
||||
|
||||
\subsubsection{K-Nearest Neighbors (KNN)}
|
||||
|
||||
KNN is a non-parametric, instance-based learning algorithm. It classifies a new data point based on the majority vote of its $k$ nearest neighbors in the feature space. Although simple, KNN can be effective when the data is well-distributed and class boundaries are smooth.
|
||||
|
||||
Its performance is sensitive to the choice of $k$ and distance metric. For high-dimensional data like STFT features, dimensionality reduction or careful scaling may be required.
|
||||
|
||||
\subsubsection{Decision Tree (DT)}
|
||||
|
||||
Decision Tree is a rule-based classifier that splits data into classes using feature thresholds. It builds a tree where each internal node represents a feature, each branch a decision rule, and each leaf a class label. DTs are easy to interpret and can capture non-linear relationships.
|
||||
|
||||
However, they are prone to overfitting, especially with noisy or small datasets.
|
||||
|
||||
\subsubsection{Random Forest (RF)}
|
||||
|
||||
Random Forest is an ensemble learning method based on constructing multiple decision trees during training and outputting the mode of the classes for classification. It improves the generalization capability of individual trees and reduces overfitting.
|
||||
|
||||
RF is suitable for damage detection as it provides robustness to noise and variance, making it ideal for real-world sensor data.
|
||||
|
||||
\subsubsection{Naïve Bayes (NB)}
|
||||
|
||||
Naïve Bayes is a probabilistic classifier based on Bayes' theorem, assuming feature independence. Despite its simplicity, it often performs well in high-dimensional problems and with small datasets.
|
||||
|
||||
NB is particularly effective when class-conditional independence holds approximately, which may occur when STFT features are well-separated in distribution.
|
||||
@@ -0,0 +1,11 @@
|
||||
\subsection{Short-Time Fourier Transform (STFT)}
|
||||
|
||||
The Short-Time Fourier Transform (STFT) is a fundamental technique used to analyze non-stationary signals, such as those generated by structures under dynamic load or white noise excitation. While the traditional Fourier Transform provides frequency-domain information, it lacks time resolution. STFT overcomes this limitation by applying the Fourier Transform over short overlapping segments of the signal, thereby producing a time-frequency representation.
|
||||
|
||||
Mathematically, the STFT of a signal $x(t)$ is given by:
|
||||
\begin{equation}
|
||||
X(t, \omega) = \int_{-\infty}^{\infty} x(\tau) w(\tau - t) e^{-j \omega \tau} d\tau
|
||||
\end{equation}
|
||||
where $w(\tau - t)$ is a window function centered at time $t$, and $\omega$ is the angular frequency.
|
||||
|
||||
In this study, the STFT is employed to extract the time-frequency features of the vibration signals collected from the structure. These features are then used as inputs to machine learning classifiers. This process captures localized frequency content over time, which is crucial in identifying structural changes due to damage.
|
||||
7
latex/chapters/id/02_literature_review/index.tex
Normal file
7
latex/chapters/id/02_literature_review/index.tex
Normal file
@@ -0,0 +1,7 @@
|
||||
\chapter{TINJAUAN PUSTAKA DAN LANDASAN TEORI}
|
||||
\section{Tinjauan Pustaka}
|
||||
\input{chapters/id/02_literature_review/abdeljaber2017.tex}
|
||||
|
||||
\section{Dasar Teori}
|
||||
\input{chapters/id/theoritical_foundation/stft.tex}
|
||||
\input{chapters/id/theoritical_foundation/machine_learning.tex}
|
||||
@@ -0,0 +1,3 @@
|
||||
Metode monitor kesehatan struktur (SHM) tradisional sering kali mengandalkan fitur yang dibuat secara manual dan pengklasifikasi (\textit{classifier}) yang diatur secara manual, yang menimbulkan tantangan dalam hal generalisasi, keandalan, dan efisiensi komputasi. Seperti yang disorot oleh \textcite{abdeljaber2017}, pendekatan-pendekatan ini umumnya memerlukan proses \textit{trial-and-error} dalam pemilihan fitur dan pengklasifikasi yang tidak hanya mengurangi ketangguhan metode tersebut di berbagai jenis struktur, tetapi juga menghambat penerapannya dalam pengaplikasian secara \textit{real-time} karena beban komputasi pada fase ekstraksi fitur.
|
||||
|
||||
\textcite{abdeljaber2017} memperkenalkan pendekatan deteksi kerusakan struktur berbasis CNN yang divalidasi melalui \textit{large-scale grandstand simulator} di Qatar University. Struktur tersebut dirancang untuk mereplikasi stadion modern, dilengkapi dengan 30 akselerometer, dan dikenai kerusakan terkontrol melalui pelonggaran baut sambungan antara balok dan gelagar. Data percepatan yang dikumpulkan di bawah eksitasi \textit{band-limited white noise} dan disampel pada 1024 Hz, kemudian dibagi menjadi bingkai berukuran 128 sampel untuk melatih 1-D CNN yang dilokalkan—satu untuk setiap sambungan (\textit{joint})—menciptakan sistem deteksi terdesentralisasi. Dalam dua fase (skenario) eksperimen, yang melibatkan pemantauan sebagian dan seluruh struktur, metode ini menunjukkan akurasi tinggi dalam pelokalisasian kerusakan, dengan kesalahan klasifikasi saat pelatihan hanya sebesar 0.54\%. Meskipun performa tetap andal bahkan dalam skenario kerusakan ganda, beberapa salah klasifikasi terjadi pada kasus kerusakan yang simetris atau berdekatan. Secara keseluruhan, metode yang diusulkan ini menawarkan solusi yang sangat efisien dan akurat untuk aplikasi SHM secara \textit{real-time}.
|
||||
@@ -0,0 +1,13 @@
|
||||
Metode pemantauan kesehatan struktur (SHM) tradisional sering kali mengandalkan fitur yang dibuat secara manual dan pengklasifikasi (\textit{classifier}) yang diatur secara manual, yang menimbulkan tantangan dalam hal generalisasi, keandalan, dan efisiensi komputasi. Seperti yang disorot oleh \textcite{abdeljaber2017}, pendekatan-pendekatan ini umumnya memerlukan proses \textit{trial-and-error} dalam pemilihan fitur dan pengklasifikasi yang tidak hanya mengurangi ketangguhan metode tersebut di berbagai jenis struktur, tetapi juga menghambat penerapannya dalam pengaplikasian secara \textit{real-time} karena beban komputasi pada fase ekstraksi fitur.
|
||||
|
||||
\textcite{abdeljaber2017} memperkenalkan pendekatan deteksi kerusakan struktur berbasis CNN yang divalidasi melalui \textit{large-scale grandstand simulator} di Qatar University. Struktur tersebut dirancang untuk mereplikasi stadion modern, dilengkapi dengan 30 akselerometer, dan dikenai kerusakan terkontrol melalui pelonggaran baut sambungan antara balok dan gelagar. Data percepatan yang dikumpulkan di bawah eksitasi \textit{band-limited white noise} dan disampel pada 1024 Hz, kemudian dibagi menjadi bingkai berukuran 128 sampel untuk melatih 1-D CNN yang dilokalkan—satu untuk setiap sambungan (\textit{joint})—menciptakan sistem deteksi terdesentralisasi. Dalam dua fase (skenario) eksperimen, yang melibatkan pemantauan sebagian dan seluruh struktur, metode ini menunjukkan akurasi tinggi dalam pelokalisasian kerusakan, dengan kesalahan klasifikasi saat pelatihan hanya sebesar 0.54\%. Meskipun performa tetap andal bahkan dalam skenario kerusakan ganda, beberapa salah klasifikasi terjadi pada kasus kerusakan yang simetris atau berdekatan. Secara keseluruhan, metode yang diusulkan ini menawarkan solusi yang sangat efisien dan akurat untuk aplikasi SHM secara \textit{real-time}.
|
||||
|
||||
\indent Metode berbasis getaran merupakan salah satu teknik paling umum dalam sistem pemantauan kesehatan struktur (SHM) karena kemampuannya dalam mendeteksi perubahan kondisi struktur secara non-destruktif. Pendekatan ini bergantung pada prinsip bahwa kerusakan pada suatu struktur, seperti kelonggaran sambungan atau penurunan kekakuan elemen, akan mengubah karakteristik dinamikanya, seperti frekuensi alami, bentuk mode, dan respons getaran terhadap eksitasi tertentu.
|
||||
|
||||
\indent Salah satu jenis kerusakan struktural yang umum dijumpai dalam sambungan mekanis adalah kelonggaran baut akibat beban dinamis berulang, seperti getaran atau kejutan. Kondisi ini dapat menyebabkan penurunan integritas struktur dan berujung pada kegagalan sistem jika tidak terdeteksi sejak dini. Oleh karena itu, deteksi kelonggaran baut secara dini telah menjadi perhatian utama dalam bidang teknik sipil, mesin, maupun dirgantara [1, 11].
|
||||
|
||||
\indent Teknik deteksi berbasis getaran terbukti efektif dalam mengidentifikasi tanda-tanda awal kelonggaran sambungan. Hal ini dilakukan dengan menganalisis perubahan spektrum frekuensi atau energi getaran antar kondisi sehat dan rusak. Dalam praktiknya, data getaran biasanya dikumpulkan melalui akselerometer yang dipasang pada titik-titik tertentu dalam struktur. Perubahan karakteristik getaran, seperti penurunan amplitudo, pergeseran frekuensi dominan, atau pola spektral lainnya, menjadi indikator keberadaan dan lokasi kerusakan.
|
||||
|
||||
\indent Sejumlah penelitian telah menerapkan teknik ini dalam konteks struktur kompleks seperti sambungan multi-baut atau grid struktural. Misalnya, studi oleh Zhao et al. [10] menunjukkan bahwa perubahan rotasi kepala baut akibat kelonggaran dapat dikaitkan dengan pola getaran tertentu. Sementara itu, pendekatan yang lebih umum dalam domain teknik sipil adalah memanfaatkan sinyal akselerasi dari sambungan kolom atau balok sebagai masukan untuk sistem klasifikasi kerusakan berbasis pembelajaran mesin [12].
|
||||
|
||||
\indent Kelebihan utama dari pendekatan berbasis getaran dibanding metode visual atau inspeksi manual adalah kemampuannya dalam mendeteksi kerusakan mikro secara lebih dini, bahkan sebelum tampak secara fisik. Namun, tantangan tetap ada, terutama dalam penempatan sensor yang optimal, pemrosesan sinyal, dan interpretasi pola dinamik yang kompleks dalam struktur grid. Oleh karena itu, kombinasi antara teknik transformasi sinyal seperti Short-Time Fourier Transform (STFT) dan algoritma pembelajaran mesin menjadi arah baru yang menjanjikan dalam riset SHM masa kini.
|
||||
@@ -0,0 +1 @@
|
||||
\subsection{Machine Learning}
|
||||
@@ -0,0 +1 @@
|
||||
\subsection{Short-Time Fourier Transform}
|
||||
65
latex/frontmatter/approval.tex
Normal file
65
latex/frontmatter/approval.tex
Normal file
@@ -0,0 +1,65 @@
|
||||
% frontmatter/approval.tex
|
||||
\setmainfont{Times New Roman}
|
||||
\addcontentsline{toc}{chapter}{LEMBAR PERSETUJUAN TUGAS AKHIR}
|
||||
|
||||
\begin{center}
|
||||
\textbf{\Large LEMBAR PERSETUJUAN TUGAS AKHIR} \\[0.5em]
|
||||
\textit{APPROVAL SHEET}
|
||||
\end{center}
|
||||
|
||||
\vspace{1em}
|
||||
|
||||
\renewcommand{\arraystretch}{1.2}
|
||||
\begin{tabular}{llp{10cm}}
|
||||
\textbf{Judul} & : & \thesistitle \\
|
||||
\textit{Title} & & \\
|
||||
|
||||
\textbf{Mahasiswa} & : & \studentname \\
|
||||
\textit{Student} & & \\
|
||||
|
||||
\textbf{Nomor Mahasiswa} & : & \studentid \\
|
||||
\textit{Student ID.} & & \\
|
||||
|
||||
\textbf{Dosen Pembimbing} & : & 1. \firstadvisor \\
|
||||
\textit{Advisors} & & 2. \secondadvisor
|
||||
\end{tabular}
|
||||
|
||||
\vspace{1em}
|
||||
\textbf{Telah disetujui oleh Tim Penguji:} \\
|
||||
\textit{Approved by the Committee on Oral Examination}
|
||||
|
||||
\vspace{1em}
|
||||
\begin{tabular}{lp{5cm}}
|
||||
\textbf{\firstadvisor} &:
|
||||
% \vspace{2cm} % signature space
|
||||
% \\[1em] % pull up next row
|
||||
\\
|
||||
\textit{Ketua Tim Penguji} &
|
||||
\noindent\makebox[5cm]{\hrulefill}\\[-0.5em]
|
||||
\textit{\small Chair} & \small Yogyakarta, \dotfill \yearofsubmission
|
||||
\\
|
||||
\textbf{\secondadvisor} &:
|
||||
% \vspace{2cm} % signature space
|
||||
% \\[1em] % pull up next row
|
||||
\\
|
||||
\textit{Anggota Tim Penguji} &
|
||||
\noindent\makebox[5cm]{\hrulefill}\\[-0.5em]
|
||||
\textit{\small Member} & \small Yogyakarta, \dotfill \yearofsubmission \\
|
||||
\end{tabular}
|
||||
|
||||
\vspace{1em}
|
||||
\noindent
|
||||
\textbf{Diterima dan disetujui sebagai persyaratan untuk memperoleh gelar Sarjana Teknik} \\
|
||||
\textit{Accepted in partial fulfillment of the requirements for the degree of Bachelor of Engineering}
|
||||
|
||||
\vspace{2em}
|
||||
\begin{center}
|
||||
\textbf{Ketua Program Studi} \\
|
||||
\textit{Head of Department}
|
||||
\end{center}
|
||||
|
||||
\vspace{3em}
|
||||
\begin{center}
|
||||
\textbf{\headdepartement} \\
|
||||
NIK. \headdepartementid
|
||||
\end{center}
|
||||
7
latex/frontmatter/endorsement.tex
Normal file
7
latex/frontmatter/endorsement.tex
Normal file
@@ -0,0 +1,7 @@
|
||||
% frontmatter/endorsement.tex
|
||||
\setmainfont{Times New Roman}
|
||||
|
||||
\chapter*{LEMBAR PENGESAHAN TUGAS AKHIR}
|
||||
\begin{center}
|
||||
{\normalsize\textit{ENDORSEMENT SHEET}}
|
||||
\end{center}
|
||||
BIN
latex/frontmatter/img/logo.png
Normal file
BIN
latex/frontmatter/img/logo.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 253 KiB |
31
latex/frontmatter/maketitle.tex
Normal file
31
latex/frontmatter/maketitle.tex
Normal file
@@ -0,0 +1,31 @@
|
||||
\begin{titlepage}
|
||||
\centering
|
||||
\vspace*{1cm}
|
||||
|
||||
{\fontsize{14pt}{16pt}\selectfont \textbf{\MakeUppercase{Tugas Akhir}}\par}
|
||||
\vspace{1.5cm}
|
||||
|
||||
{\fontsize{14pt}{16pt}\selectfont \textbf{\MakeUppercase{\thesistitle}}\par}
|
||||
\vspace{1.5cm}
|
||||
|
||||
\includegraphics[width=5cm]{frontmatter/img/logo.png}
|
||||
\vspace{1.5cm}
|
||||
|
||||
|
||||
\textbf{Disusun oleh:} \\
|
||||
{\fontsize{14pt}{16pt}\selectfont \textbf{\studentname}} \\
|
||||
{\fontsize{14pt}{16pt}\selectfont \textbf{\studentid}} \\
|
||||
|
||||
|
||||
\vfill
|
||||
|
||||
{\fontsize{12pt}{14pt}\selectfont
|
||||
\textbf{\program} \\
|
||||
\textbf{\faculty} \\
|
||||
\textbf{\university} \\
|
||||
\textbf{\yearofsubmission}
|
||||
}
|
||||
|
||||
\end{titlepage}%
|
||||
|
||||
|
||||
40
latex/frontmatter/originality.tex
Normal file
40
latex/frontmatter/originality.tex
Normal file
@@ -0,0 +1,40 @@
|
||||
% frontmatter/originality.tex
|
||||
\setmainfont{Times New Roman}
|
||||
|
||||
\chapter*{PERNYATAAN KEASLIAN}
|
||||
\addcontentsline{toc}{chapter}{PERNYATAAN KEASLIAN}
|
||||
% \begin{center}
|
||||
% {\normalsize\textit{ORIGINALITY STATEMENT}}
|
||||
% \end{center}
|
||||
Saya yang bertanda tangan di bawah ini:
|
||||
\renewcommand{\arraystretch}{1.2}
|
||||
\begin{center}
|
||||
\begin{tabular}{llp{10cm}}
|
||||
\textbf{Nama} & : & \studentname \\
|
||||
\textbf{Nomor Mahasiswa} & : & \studentid \\
|
||||
\textbf{Judul} & : & \thesistitle \\
|
||||
\end{tabular}
|
||||
\end{center}
|
||||
Menyatakan dengan sebenarnya bahwa tugas akhir ini merupakan karya saya sendiri. Apabila terdapat karya orang lain yang saya kutip, maka saya akan mencantumkan sumber secara jelas. Jika di kemudian hari ditemukan ketidakbenaran dalam pernyataan ini, maka saya bersedia menerima sanksi sesuai dengan aturan yang berlaku. Demikian pernyataan ini saya buat tanpa ada paksaan dari pihak mana pun.\par
|
||||
|
||||
\begin{flushright}
|
||||
\begin{minipage}{0.5\textwidth}
|
||||
\centering
|
||||
Yogyakarta, \today \\[1em]
|
||||
Yang membuat pernyataan, \\[0.5cm] % space for signature
|
||||
% Materai box
|
||||
\hspace*{-2cm}% shift the box slightly left
|
||||
\begin{tabular}{@{}c@{}}
|
||||
\fbox{
|
||||
\begin{minipage}[c][2cm][c]{2.5cm}
|
||||
\centering
|
||||
Materai\\
|
||||
6000
|
||||
\end{minipage}
|
||||
}
|
||||
\end{tabular}
|
||||
\\[1cm]
|
||||
% \rule{6cm}{0.4pt} % signature line
|
||||
Rifqi Damar Panuluh
|
||||
\end{minipage}
|
||||
\end{flushright}
|
||||
@@ -0,0 +1,40 @@
|
||||
\documentclass[draftmark]{thesis}
|
||||
|
||||
% Title Information
|
||||
\setthesisinfo
|
||||
{Prediksi Lokasi Kerusakan dengan Machine Learning}
|
||||
{Rifqi Damar Panuluh}
|
||||
{20210110224}
|
||||
{PROGRAM STUDI TEKNIK SIPIL}
|
||||
{FAKULTAS TEKNIK}
|
||||
{UNIVERSITAS MUHAMMADIYAH YOGYAKARTA}
|
||||
{2025}
|
||||
|
||||
% Input preamble
|
||||
\input{preamble/packages}
|
||||
% \input{preamble/fonts}
|
||||
\input{preamble/macros}
|
||||
|
||||
\begin{document}
|
||||
|
||||
\maketitle
|
||||
\frontmatter
|
||||
\input{frontmatter/approval}\clearpage
|
||||
\input{frontmatter/originality}\clearpage
|
||||
\input{frontmatter/acknowledgement}\clearpage
|
||||
\tableofcontents
|
||||
\clearpage
|
||||
\mainmatter
|
||||
\pagestyle{fancyplain}
|
||||
% Include content
|
||||
\include{content/abstract}
|
||||
\include{content/introduction}
|
||||
\include{chapters/01_introduction}
|
||||
\include{content/chapter2}
|
||||
\include{content/conclusion}
|
||||
|
||||
% Bibliography
|
||||
% \bibliographystyle{IEEEtran}
|
||||
% \bibliography{references}
|
||||
|
||||
\end{document}
|
||||
11
latex/metadata.tex
Normal file
11
latex/metadata.tex
Normal file
@@ -0,0 +1,11 @@
|
||||
\newcommand{\studentname}{Rifqi Damar Panuluh}
|
||||
\newcommand{\studentid}{20210110224}
|
||||
\newcommand{\thesistitle}{Prediksi Lokasi Kerusakan dengan Machine Learning}
|
||||
\newcommand{\firstadvisor}{Ir. Muhammad Ibnu Syamsi, Ph.D.}
|
||||
\newcommand{\secondadvisor}{}
|
||||
\newcommand{\headdepartement}{Puji Harsanto, S.T. M.T., Ph.D.}
|
||||
\newcommand{\headdepartementid}{19740607201404123064}
|
||||
\newcommand{\faculty}{Fakultas Teknik}
|
||||
\newcommand{\program}{Teknik Sipil}
|
||||
\newcommand{\university}{Universitas Muhammadiyah Yogyakarta}
|
||||
\newcommand{\yearofsubmission}{2025}
|
||||
5
latex/preamble/macros.tex
Normal file
5
latex/preamble/macros.tex
Normal file
@@ -0,0 +1,5 @@
|
||||
\newcommand{\eg}{\textit{e.g.},\ }
|
||||
\newcommand{\ie}{\textit{i.e.},\ }
|
||||
\newcommand{\etal}{\textit{et al.}}
|
||||
|
||||
\let\oldtableofcontents\tableofcontents % backup
|
||||
3
latex/preamble/packages.tex
Normal file
3
latex/preamble/packages.tex
Normal file
@@ -0,0 +1,3 @@
|
||||
\usepackage{amsmath, amssymb, siunitx}
|
||||
\usepackage{caption}
|
||||
\usepackage{subcaption}
|
||||
156
latex/thesis.cls
Normal file
156
latex/thesis.cls
Normal file
@@ -0,0 +1,156 @@
|
||||
\NeedsTeXFormat{LaTeX2e}
|
||||
\ProvidesClass{thesis}[2025/05/10 Bachelor Thesis Class]
|
||||
|
||||
\newif\if@draftmark
|
||||
\@draftmarkfalse
|
||||
|
||||
\DeclareOption{draftmark}{\@draftmarktrue}
|
||||
\ProcessOptions \relax
|
||||
\LoadClass[a4paper,12pt,oneside]{book}
|
||||
|
||||
% Load common packages
|
||||
\RequirePackage{polyglossia}
|
||||
\RequirePackage{fontspec}
|
||||
\RequirePackage{titlesec}
|
||||
\RequirePackage{fancyhdr}
|
||||
\RequirePackage{geometry}
|
||||
\RequirePackage{setspace}
|
||||
\RequirePackage{graphicx}
|
||||
\RequirePackage{hyperref}
|
||||
\RequirePackage{etoolbox}
|
||||
\RequirePackage{tocloft}
|
||||
\RequirePackage{tocbibind}
|
||||
|
||||
% Polyglossia set language
|
||||
\setmainlanguage{bahasai}
|
||||
% \setotherlanguage{english}
|
||||
|
||||
% Conditionally load the watermark package and settings
|
||||
\if@draftmark
|
||||
\RequirePackage{draftwatermark}
|
||||
\SetWatermarkText{Draft: \today [wip]}
|
||||
\SetWatermarkColor[gray]{0.7}
|
||||
\SetWatermarkFontSize{2cm}
|
||||
\SetWatermarkAngle{90}
|
||||
\SetWatermarkHorCenter{1.5cm}
|
||||
\fi
|
||||
|
||||
% Page layout
|
||||
\geometry{left=3cm, top=3cm, right=3cm, bottom=3cm}
|
||||
\setlength{\parskip}{0.5em}
|
||||
\setlength{\parindent}{0pt}
|
||||
\onehalfspacing
|
||||
|
||||
% Fonts
|
||||
\defaultfontfeatures{Ligatures=TeX}
|
||||
\setmainfont{Times New Roman}
|
||||
\setsansfont{Arial}
|
||||
\setmonofont{Courier New}
|
||||
|
||||
% Metadata commands
|
||||
\input{metadata}
|
||||
|
||||
\newcommand{\setthesisinfo}[7]{%
|
||||
\renewcommand{\thesistitle}{#1}%
|
||||
\renewcommand{\studentname}{#2}%
|
||||
\renewcommand{\studentid}{#3}%
|
||||
\renewcommand{\program}{#4}%
|
||||
\renewcommand{\faculty}{#5}%
|
||||
\renewcommand{\university}{#6}%
|
||||
\renewcommand{\yearofsubmission}{#7}%
|
||||
}
|
||||
|
||||
% % Header and footer
|
||||
\fancypagestyle{fancy}{%
|
||||
\fancyhf{}
|
||||
\fancyhead[R]{\nouppercase{\rightmark}}
|
||||
\fancyhead[L]{\nouppercase{\leftmark}}
|
||||
\fancyfoot[C]{\thepage}
|
||||
}
|
||||
\fancypagestyle{fancyplainfrontmatter}{%
|
||||
\renewcommand{\headrulewidth}{0pt}
|
||||
\fancyfoot[C]{\thepage}
|
||||
}
|
||||
\fancypagestyle{fancyplain}{%
|
||||
\fancyhf{}
|
||||
\renewcommand{\headrulewidth}{0pt}
|
||||
\fancyhead[R]{\thepage}
|
||||
}
|
||||
|
||||
% Chapter formatting
|
||||
\titlespacing{\chapter}{0pt}{0pt}{*1.5}
|
||||
\titleformat{\chapter}[display]
|
||||
{\normalsize\bfseries\centering}
|
||||
{BAB~\Roman{chapter}} % << display format
|
||||
{1ex}
|
||||
{\MakeUppercase}
|
||||
\titleformat{\section}
|
||||
{\normalsize\bfseries}{\thesection}{1em}{}
|
||||
|
||||
\titleformat{\subsection}
|
||||
{\normalsize\bfseries}{\thesubsection}{1em}{}
|
||||
|
||||
% Ensure chapter reference in TOC matches
|
||||
\renewcommand{\cftchappresnum}{BAB~}
|
||||
\renewcommand{\cftchapaftersnum}{\quad}
|
||||
|
||||
% \titlespacing*{\chapter}{0pt}{-10pt}{20pt}
|
||||
|
||||
% Redefine \maketitle
|
||||
\renewcommand{\maketitle}{\input{frontmatter/maketitle}}
|
||||
|
||||
% Chapter & Section format
|
||||
\renewcommand{\cftchapfont}{\normalsize\MakeUppercase}
|
||||
% \renewcommand{\cftsecfont}{}
|
||||
% \renewcommand{\cftsubsecfont}{\itshape}
|
||||
% \renewcommand{\thesection}{\textup{\Roman{chapter}}.\arabic{section}}
|
||||
|
||||
|
||||
% Dot leaders, spacing, indentation
|
||||
\setlength{\cftbeforechapskip}{0em}
|
||||
\setlength{\cftchapindent}{0pt}
|
||||
\setlength{\cftsecindent}{0em}
|
||||
\setlength{\cftsubsecindent}{2.5em}
|
||||
\setlength{\cftchapnumwidth}{3.5em}
|
||||
\setlength{\cftsecnumwidth}{3.5em}
|
||||
\setlength{\cftsubsecnumwidth}{2.5em}
|
||||
\setlength{\cftfignumwidth}{5em}
|
||||
\setlength{\cfttabnumwidth}{4em}
|
||||
\renewcommand \cftchapdotsep{4.5} % https://tex.stackexchange.com/a/273764
|
||||
\renewcommand{\cftchapleader}{\normalfont\cftdotfill{\cftsecdotsep}}
|
||||
\renewcommand{\cftchappagefont}{\normalfont}
|
||||
\renewcommand{\cftfigpresnum}{\figurename~}
|
||||
\renewcommand{\cfttabpresnum}{\tablename~}
|
||||
|
||||
% Ensure TOC and References Respect Custom Numbering
|
||||
\renewcommand{\thechapter}{\Roman{chapter}}
|
||||
\renewcommand\thesection{\arabic{chapter}.\arabic{section}}
|
||||
|
||||
% Change figure numbering to include chapter (e.g., Figure 1.1, 1.2...)
|
||||
\renewcommand{\thefigure}{\arabic{chapter}.\arabic{figure}}
|
||||
\renewcommand{\thetable}{\arabic{chapter}.\arabic{table}}
|
||||
\renewcommand{\theequation}{\arabic{chapter}.\arabic{equation}}
|
||||
|
||||
% Table of Content (TOC) Title styling
|
||||
\renewcommand{\cfttoctitlefont}{\hfill\bfseries\MakeUppercase}
|
||||
\renewcommand{\cftaftertoctitle}{\hfill} % https://tex.stackexchange.com/a/255699/394075
|
||||
% List of Figures (LOF) Title styling
|
||||
\renewcommand{\cftloftitlefont}{\hfill\bfseries\MakeUppercase}
|
||||
\renewcommand{\cftafterloftitle}{\hfill}
|
||||
% List of Tables (LOT) Title styling
|
||||
\renewcommand{\cftlottitlefont}{\hfill\bfseries\MakeUppercase}
|
||||
\renewcommand{\cftafterlottitle}{\hfill}
|
||||
% \renewcommand{\cfttoctitlefont}{\bfseries\MakeUppercase}
|
||||
% \renewcommand{\cftaftertoctitle}{\vskip 2em}
|
||||
|
||||
% % Apply a custom fancyhdr layout only on the first page of each \chapter, and use no header/footer elsewhere
|
||||
% % \let\oldchapter\chapter
|
||||
% % \renewcommand{\chapter}{%
|
||||
% % \cleardoublepage
|
||||
% % \pagestyle{fancyplainchapter}%
|
||||
% % \oldchapter
|
||||
% % \thispagestyle{fancyplainchapter} % ensure chapter start page uses it
|
||||
% % \pagestyle{fancyplain}% switch for subsequent pages
|
||||
% % }
|
||||
|
||||
\endinput
|
||||
Reference in New Issue
Block a user