mirror of
https://github.com/jung-geun/PSO.git
synced 2025-12-20 04:50:45 +09:00
23-07-07
dev container setup - tqdm + tensorflow installed automatically, env automatically created with name = pso
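(For reference, a dev container that auto-installs tqdm and tensorflow and creates a conda env named pso might look roughly like the sketch below. This is an assumption based on the commit message only, not the actual .devcontainer files added in this commit; the base image and commands are illustrative.)

{
    "name": "pso",
    "image": "mcr.microsoft.com/devcontainers/miniconda:latest",
    "postCreateCommand": "conda create -y -n pso python && conda run -n pso pip install tensorflow tqdm"
}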
@@ -1,30 +1,25 @@
 # %%
 import os
-os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
+
+os.environ["TF_CPP_MIN_LOG_LEVEL"] = "2"

 import tensorflow as tf

 tf.random.set_seed(777) # for reproducibility

-from tensorflow import keras
-from keras.datasets import mnist
-from keras.models import Sequential
-from keras.layers import Dense, Dropout, Flatten
-from keras.layers import Conv2D, MaxPooling2D
-from keras import backend as K
-
-# from pso_tf import PSO
-from pso import Optimizer
-import gc
-from datetime import date
-
 import numpy as np
+
+from datetime import date
+from keras import backend as K
+from keras.datasets import mnist
+from keras.layers import Conv2D, Dense, Dropout, Flatten, MaxPooling2D
+from keras.models import Sequential
+from tensorflow import keras
+from tqdm import tqdm
+
+import gc
+from pso import Optimizer

 # print(tf.__version__)
 # print(tf.config.list_physical_devices())
 # print(f"Num GPUs Available: {len(tf.config.list_physical_devices('GPU'))}")


 def get_data():
     (x_train, y_train), (x_test, y_test) = mnist.load_data()
@@ -37,26 +32,30 @@ def get_data():
     print(f"x_test : {x_test[0].shape} | y_test : {y_test[0].shape}")
     return x_train, y_train, x_test, y_test

+
 def get_data_test():
     (x_train, y_train), (x_test, y_test) = mnist.load_data()
     x_test = x_test.reshape((10000, 28, 28, 1))

     return x_test, y_test

+
 def make_model():
     model = Sequential()
-    model.add(Conv2D(32, kernel_size=(5, 5),
-              activation='relu', input_shape=(28, 28, 1)))
+    model.add(
+        Conv2D(32, kernel_size=(5, 5), activation="relu", input_shape=(28, 28, 1))
+    )
     model.add(MaxPooling2D(pool_size=(3, 3)))
-    model.add(Conv2D(64, kernel_size=(3, 3), activation='relu'))
+    model.add(Conv2D(64, kernel_size=(3, 3), activation="relu"))
     model.add(MaxPooling2D(pool_size=(2, 2)))
     model.add(Dropout(0.25))
     model.add(Flatten())
-    model.add(Dense(128, activation='relu'))
-    model.add(Dense(10, activation='softmax'))
+    model.add(Dense(128, activation="relu"))
+    model.add(Dense(10, activation="softmax"))

     return model

+
 # %%
 model = make_model()
 x_test, y_test = get_data_test()
@@ -67,12 +66,23 @@ x_test, y_test = get_data_test()
 # loss = 'poisson'
 # loss = 'cosine_similarity'
 # loss = 'log_cosh'
 # loss = 'huber_loss'
 # loss = 'huber_loss'
 # loss = 'mean_absolute_error'
 # loss = 'mean_absolute_percentage_error'
 # loss = 'mean_squared_error'

-loss = ['mse', 'categorical_crossentropy', 'binary_crossentropy', 'kullback_leibler_divergence', 'poisson', 'cosine_similarity', 'log_cosh', 'huber_loss', 'mean_absolute_error', 'mean_absolute_percentage_error']
+loss = [
+    "mse",
+    "categorical_crossentropy",
+    "binary_crossentropy",
+    "kullback_leibler_divergence",
+    "poisson",
+    "cosine_similarity",
+    "log_cosh",
+    "huber_loss",
+    "mean_absolute_error",
+    "mean_absolute_percentage_error",
+]
 n_particles = [50, 75, 100]
 c0 = [0.25, 0.35, 0.45, 0.55]
 c1 = [0.5, 0.6, 0.7, 0.8, 0.9]
@@ -93,31 +103,31 @@ if __name__ == "__main__":
|
||||
for n_s in negative_swarm:
|
||||
pso_mnist = Optimizer(
|
||||
model,
|
||||
loss=loss_,
|
||||
loss=loss_,
|
||||
n_particles=n,
|
||||
c0=c_0,
|
||||
c1=c_1,
|
||||
c0=c_0,
|
||||
c1=c_1,
|
||||
w_min=w_m,
|
||||
w_max=w_M,
|
||||
negative_swarm=n_s
|
||||
)
|
||||
negative_swarm=n_s,
|
||||
)
|
||||
|
||||
best_score = pso_mnist.fit(
|
||||
x_test,
|
||||
y_test,
|
||||
epochs=200,
|
||||
save=True,
|
||||
save_path="./result/mnist",
|
||||
renewal="acc",
|
||||
save_path="./result/mnist",
|
||||
renewal="acc",
|
||||
empirical_balance=False,
|
||||
Dispersion=False,
|
||||
check_point=25
|
||||
)
|
||||
|
||||
Dispersion=False,
|
||||
check_point=25,
|
||||
)
|
||||
|
||||
del pso_mnist
|
||||
gc.collect()
|
||||
gc.collect()
|
||||
tf.keras.backend.clear_session()
|
||||
|
||||
|
||||
except KeyboardInterrupt:
|
||||
print("KeyboardInterrupt")
|
||||
finally:
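For context, the hunk above only shows the innermost loop of the hyperparameter sweep. A rough sketch of what the surrounding sweep over the lists defined earlier could look like is below; it is an illustration inferred from the variable names visible in the diff (loss_, n, c_0, c_1, w_m, w_M, n_s) and reuses model, x_test, y_test and the loss/n_particles/c0/c1 lists from the script above. The w_min, w_max and negative_swarm values are assumptions, and itertools.product stands in for the explicit nested for-loops of the real file.

# Sketch only - not the actual contents of the commit.
from itertools import product

import gc
import tensorflow as tf

from pso import Optimizer

w_min = [0.3, 0.4]           # assumed values, not from the diff
w_max = [0.7, 0.9]           # assumed values, not from the diff
negative_swarm = [0.0, 0.1]  # assumed values, not from the diff

if __name__ == "__main__":
    try:
        # sweep every combination of the hyperparameter lists
        for loss_, n, c_0, c_1, w_m, w_M, n_s in product(
            loss, n_particles, c0, c1, w_min, w_max, negative_swarm
        ):
            pso_mnist = Optimizer(
                model,
                loss=loss_,
                n_particles=n,
                c0=c_0,
                c1=c_1,
                w_min=w_m,
                w_max=w_M,
                negative_swarm=n_s,
            )
            best_score = pso_mnist.fit(
                x_test,
                y_test,
                epochs=200,
                save=True,
                save_path="./result/mnist",
                renewal="acc",
                empirical_balance=False,
                Dispersion=False,
                check_point=25,
            )
            # free the particles and the TF graph before the next combination
            del pso_mnist
            gc.collect()
            tf.keras.backend.clear_session()
    except KeyboardInterrupt:
        print("KeyboardInterrupt")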