Nguyen Thi Dieu Hien committed
Commit 29937fc · unverified · 1 Parent(s): 7475a6d

Update optimize_bilstm.py

Files changed (1)
  1. optimize_bilstm.py +9 -9
optimize_bilstm.py CHANGED
@@ -20,11 +20,11 @@ session = InteractiveSession(config=config)
 
 """### **Load data**"""
 
-# Load dữ liệu từ file pickle
+# Load data
 with open('data/features_162k_phobertbase.pkl', 'rb') as f:
     data_dict = pickle.load(f)
 
-# Trích xuất các đặc trưng và nhãn từ dictionary
+
 X_train = np.array(data_dict['X_train'])
 X_val = np.array(data_dict['X_val'])
 X_test = np.array(data_dict['X_test'])
@@ -42,20 +42,20 @@ y_val = y_val.values.astype(int)
 def build_bilstm_model(lstm_units_1, lstm_units_2, dense_units, dropout_rate, learning_rate):
     model = Sequential()
     model.add(Input(shape=(X_train.shape[1], X_train.shape[2])))
-    # Lớp LSTM 1 với dropout
+    # LSTM Layer 1 with dropout
     model.add(Bidirectional(LSTM(lstm_units_1, return_sequences=True)))
     model.add(Dropout(dropout_rate))
-    # Lớp LSTM 2 với dropout
+    # LSTM Layer 2 with dropout
     model.add(Bidirectional(LSTM(lstm_units_2, return_sequences=False)))
     model.add(Dropout(dropout_rate))
-    # Lớp Dense với dropout kích hoạt ReLU
+    # Dense Layer with dropout and ReLU activation
     model.add(Dense(dense_units, activation='relu'))
     model.add(Dropout(dropout_rate))
-    # Lớp Dense cuối cùng với kích hoạt softmax
+    # Final Dense Layer with softmax activation
     model.add(Dense(y_train.shape[1], activation='softmax'))
-    # Sử dụng tối ưu hóa Adam với learning rate được truyền vào
+    # Use Adam optimizer with the specified learning rate
     optimizer = Adam(learning_rate=learning_rate)
-    # Biên soạn mô hình
+    # Compile the model
     model.compile(loss='categorical_crossentropy', optimizer=optimizer, metrics=['accuracy'])
 
     return model
@@ -106,4 +106,4 @@ plot_optimization_history(study_bilstm)
 html_file_path = "images/study_bilstm_phobertbase_optimize_history.html"
 # Plot and save the optimization history plot as an HTML file
 ov.plot_optimization_history(study_bilstm).write_html(html_file_path)
-plot_optimization_history(study_bilstm)
+plot_optimization_history(study_bilstm)
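For context, build_bilstm_model is presumably wrapped in an Optuna objective so that the study referenced at the end of the file (study_bilstm) can search the hyperparameter space. The objective itself is not part of this diff, so the sketch below is only illustrative: the search ranges, epoch count, and batch size are assumptions, not the script's actual settings.

import optuna

def objective(trial):
    # Hypothetical search space; the ranges here are assumptions, not the script's values
    model = build_bilstm_model(
        lstm_units_1=trial.suggest_int("lstm_units_1", 32, 256),
        lstm_units_2=trial.suggest_int("lstm_units_2", 32, 256),
        dense_units=trial.suggest_int("dense_units", 32, 256),
        dropout_rate=trial.suggest_float("dropout_rate", 0.1, 0.5),
        learning_rate=trial.suggest_float("learning_rate", 1e-5, 1e-2, log=True),
    )
    # Short training run; report the best validation accuracy back to Optuna
    history = model.fit(X_train, y_train,
                        validation_data=(X_val, y_val),
                        epochs=5, batch_size=64, verbose=0)
    return max(history.history["val_accuracy"])

study_bilstm = optuna.create_study(direction="maximize")
study_bilstm.optimize(objective, n_trials=20)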
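The ov alias in the final hunk is presumably optuna.visualization: plot_optimization_history returns a Plotly figure, and write_html saves it as an interactive HTML page. A minimal, self-contained sketch of that pattern, using a toy objective since the real study is built elsewhere in the script:

import optuna
import optuna.visualization as ov  # assumed to be the 'ov' alias used in the script

# Toy study so the plotting call has a trial history to visualize
study = optuna.create_study(direction="maximize")
study.optimize(lambda trial: trial.suggest_float("x", 0.0, 1.0), n_trials=10)

fig = ov.plot_optimization_history(study)  # returns a plotly.graph_objects.Figure
fig.write_html("optimize_history.html")    # the script writes to images/..., which must already exist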