Publish TensorBoard-enabled package to PyPI
A log directory must be specified to use TensorBoard
Remove source files from the PyPI upload (wheel only)
Switch tqdm to tqdm.auto
This commit is contained in:
jung-geun
2023-07-17 11:50:45 +09:00
parent 768d3ccee7
commit fbecda4b89
13 changed files with 328 additions and 64 deletions
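
The TensorBoard requirement in the message maps to a new `log_name` argument on `Optimizer.fit` (see the pso/optimizer.py diff below): with `log=2`, event files are written under `logs/<log_name>/<run-time>/train/<particle>`, and `fit` exits if `log_name` is missing. A minimal sketch of the new call, with a placeholder model and data; the hyperparameters mirror the mnist/iris diffs below:

```python
# Sketch of the updated fit() call introduced in this commit.
# The tiny model and random data are placeholders, not this repo's code.
import numpy as np
from keras.layers import Dense
from keras.models import Sequential

from pso import Optimizer

model = Sequential([Dense(8, activation="relu", input_shape=(4,)),
                    Dense(3, activation="softmax")])
x_train = np.random.rand(32, 4)                   # placeholder data
y_train = np.eye(3)[np.random.randint(0, 3, 32)]  # one-hot labels

pso_demo = Optimizer(
    model,
    loss="categorical_crossentropy",
    n_particles=70,
    c0=0.3,
    c1=0.5,
    w_min=0.4,
    w_max=0.7,
    negative_swarm=0.1,
    mutation_swarm=0.2,
    particle_min=-5,
    particle_max=5,
)

best_score = pso_demo.fit(
    x_train,
    y_train,
    epochs=200,
    save_info=True,
    log=2,             # 2 = write TensorBoard summaries
    log_name="demo",   # required when log == 2; events go to logs/demo/...
    save_path="./result/demo",
    renewal="acc",
    check_point=25,
)
```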

View File

@@ -23,5 +23,5 @@ jobs:
       TWINE_USERNAME: __token__
       TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
     run: |
-      python setup.py sdist bdist_wheel
-      twine upload dist/*
+      python setup.py bdist_wheel
+      twine upload dist/*.whl

View File

@@ -20,6 +20,6 @@ jobs:
       echo $CONDA/bin >> $GITHUB_PATH
   - name: Install dependencies
     run: |
-      conda env create --file environment.yaml --name pso
+      conda env create --file conda_env/environment.yaml --name pso
       conda activate pso
       python mnist.py

View File

@@ -38,6 +38,8 @@ conda env create -f ./conda_env/environment.yaml
 ### File structure
 ```plain text
+|-- /conda_env                # conda environment configuration files
+|   |-- environment.yaml      # conda environment configuration file
 |-- /metacode                 # base PSO code
 |   |-- pso_bp.py             # PSO that optimizes a backpropagation function - reaches 99%+ accuracy, but differs from the project's goal
 |   |-- pso_meta.py           # basic PSO algorithm implementation
@@ -46,18 +48,15 @@ conda env create -f ./conda_env/environment.yaml
 |   |-- __init__.py           # initialization file for using the pso module
 |   |-- optimizer.py          # core code for using the PSO algorithm
 |   |-- particle.py           # stores each particle's state and position
-|-- examples.py               # examples using the psokeras code
-|-- xor.ipynb                 # solving the XOR problem with PSO
+|-- xor.py                    # solving the XOR problem with PSO
 |-- iris.py                   # solving the Iris problem with PSO
 |-- iris_tf.py                # solving the Iris problem with TensorFlow
 |-- mnist.py                  # solving the MNIST problem with PSO
-|-- mnist_tf.py               # solving the MNIST problem with TensorFlow
 |-- plt.ipynb                 # plotting training results with pyplot
-|-- env.yaml                  # conda environment configuration file
-|-- readme.md                 # this file
+|-- README.md                 # this file
 ```
-The psokeras and pyswarms libraries are external libraries, so their code was not modified
 The pso library was adapted from the base ./metacode/pso_meta.py code to train TensorFlow models [2]
 ## 2. Solving optimization problems with the PSO algorithm

View File

@@ -40,7 +40,7 @@ def load_data():
 model = make_model()
 x_train, x_test, y_train, y_test = load_data()
-loss = ["categorical_crossentropy", 'mean_squared_error']
+loss = ["categorical_crossentropy", "mean_squared_error"]
 pso_iris = Optimizer(
     model,
@@ -60,11 +60,11 @@ best_score = pso_iris.fit(
     x_train,
     y_train,
     epochs=200,
-    save=True,
+    save_info=True,
+    log=2,
+    log_name="iris",
     save_path="./result/iris",
     renewal="acc",
-    empirical_balance=False,
-    Dispersion=False,
     check_point=25,
 )

mnist.ipynb (new file, 255 lines)
View File

@@ -0,0 +1,255 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"import sys\n",
"\n",
"os.environ[\"TF_CPP_MIN_LOG_LEVEL\"] = \"2\"\n",
"\n",
"import gc\n",
"\n",
"import tensorflow as tf\n",
"from keras.datasets import mnist\n",
"from keras.layers import Conv2D, Dense, Dropout, Flatten, MaxPooling2D\n",
"from keras.models import Sequential\n",
"from tensorflow import keras\n",
"\n",
"from pso import Optimizer\n",
"\n",
"\n",
"def get_data():\n",
" (x_train, y_train), (x_test, y_test) = mnist.load_data()\n",
"\n",
" x_train, x_test = x_train / 255.0, x_test / 255.0\n",
" x_train = x_train.reshape((60000, 28, 28, 1))\n",
" x_test = x_test.reshape((10000, 28, 28, 1))\n",
"\n",
" y_train, y_test = tf.one_hot(y_train, 10), tf.one_hot(y_test, 10)\n",
"\n",
" print(f\"x_train : {x_train[0].shape} | y_train : {y_train[0].shape}\")\n",
" print(f\"x_test : {x_test[0].shape} | y_test : {y_test[0].shape}\")\n",
"\n",
" return x_train, y_train, x_test, y_test\n",
"\n",
"\n",
"def get_data_test():\n",
" (x_train, y_train), (x_test, y_test) = mnist.load_data()\n",
" x_test = x_test / 255.0\n",
" x_test = x_test.reshape((10000, 28, 28, 1))\n",
"\n",
" y_test = tf.one_hot(y_test, 10)\n",
"\n",
" print(f\"x_test : {x_test[0].shape} | y_test : {y_test[0].shape}\")\n",
"\n",
" return x_test, y_test\n",
"\n",
"\n",
"def make_model():\n",
" model = Sequential()\n",
" model.add(\n",
" Conv2D(32, kernel_size=(5, 5), activation=\"relu\", input_shape=(28, 28, 1))\n",
" )\n",
" model.add(MaxPooling2D(pool_size=(3, 3)))\n",
" model.add(Conv2D(64, kernel_size=(3, 3), activation=\"relu\"))\n",
" model.add(MaxPooling2D(pool_size=(2, 2)))\n",
" model.add(Dropout(0.25))\n",
" model.add(Flatten())\n",
" model.add(Dense(128, activation=\"relu\"))\n",
" model.add(Dense(10, activation=\"softmax\"))\n",
"\n",
"\n",
" return model"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"x_test : (28, 28, 1) | y_test : (10,)\n",
"start running time : 20230716-194018\n"
]
},
{
"data": {
"application/vnd.jupyter.widget-view+json": {
"model_id": "04956808700d412f93bfed35ab8f83f8",
"version_major": 2,
"version_minor": 0
},
"text/plain": [
"Initializing Particles: 0%| | 0/70 [00:00<?, ?it/s]"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"ename": "KeyboardInterrupt",
"evalue": "",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)",
"Cell \u001b[0;32mIn[2], line 55\u001b[0m\n\u001b[1;32m 38\u001b[0m x_train, y_train \u001b[39m=\u001b[39m get_data_test()\n\u001b[1;32m 40\u001b[0m loss \u001b[39m=\u001b[39m [\n\u001b[1;32m 41\u001b[0m \u001b[39m\"\u001b[39m\u001b[39mmean_squared_error\u001b[39m\u001b[39m\"\u001b[39m,\n\u001b[1;32m 42\u001b[0m \u001b[39m\"\u001b[39m\u001b[39mcategorical_crossentropy\u001b[39m\u001b[39m\"\u001b[39m,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 51\u001b[0m \u001b[39m\"\u001b[39m\u001b[39mmean_absolute_percentage_error\u001b[39m\u001b[39m\"\u001b[39m,\n\u001b[1;32m 52\u001b[0m ]\n\u001b[0;32m---> 55\u001b[0m pso_mnist \u001b[39m=\u001b[39m Optimizer(\n\u001b[1;32m 56\u001b[0m model,\n\u001b[1;32m 57\u001b[0m loss\u001b[39m=\u001b[39;49mloss[\u001b[39m0\u001b[39;49m],\n\u001b[1;32m 58\u001b[0m n_particles\u001b[39m=\u001b[39;49m\u001b[39m70\u001b[39;49m,\n\u001b[1;32m 59\u001b[0m c0\u001b[39m=\u001b[39;49m\u001b[39m0.3\u001b[39;49m,\n\u001b[1;32m 60\u001b[0m c1\u001b[39m=\u001b[39;49m\u001b[39m0.5\u001b[39;49m,\n\u001b[1;32m 61\u001b[0m w_min\u001b[39m=\u001b[39;49m\u001b[39m0.4\u001b[39;49m,\n\u001b[1;32m 62\u001b[0m w_max\u001b[39m=\u001b[39;49m\u001b[39m0.7\u001b[39;49m,\n\u001b[1;32m 63\u001b[0m negative_swarm\u001b[39m=\u001b[39;49m\u001b[39m0.1\u001b[39;49m,\n\u001b[1;32m 64\u001b[0m mutation_swarm\u001b[39m=\u001b[39;49m\u001b[39m0.2\u001b[39;49m,\n\u001b[1;32m 65\u001b[0m particle_min\u001b[39m=\u001b[39;49m\u001b[39m-\u001b[39;49m\u001b[39m5\u001b[39;49m,\n\u001b[1;32m 66\u001b[0m particle_max\u001b[39m=\u001b[39;49m\u001b[39m5\u001b[39;49m,\n\u001b[1;32m 67\u001b[0m )\n\u001b[1;32m 69\u001b[0m best_score \u001b[39m=\u001b[39m pso_mnist\u001b[39m.\u001b[39mfit(\n\u001b[1;32m 70\u001b[0m x_train,\n\u001b[1;32m 71\u001b[0m y_train,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 77\u001b[0m check_point\u001b[39m=\u001b[39m\u001b[39m25\u001b[39m,\n\u001b[1;32m 78\u001b[0m )\n\u001b[1;32m 80\u001b[0m \u001b[39mprint\u001b[39m(\u001b[39m\"\u001b[39m\u001b[39mDone!\u001b[39m\u001b[39m\"\u001b[39m)\n",
"File \u001b[0;32m/drive/samba/private_files/jupyter/PSO/pso/optimizer.py:94\u001b[0m, in \u001b[0;36mOptimizer.__init__\u001b[0;34m(self, model, loss, n_particles, c0, c1, w_min, w_max, negative_swarm, mutation_swarm, np_seed, tf_seed, particle_min, particle_max)\u001b[0m\n\u001b[1;32m 92\u001b[0m \u001b[39mprint\u001b[39m(\u001b[39mf\u001b[39m\u001b[39m\"\u001b[39m\u001b[39mstart running time : \u001b[39m\u001b[39m{\u001b[39;00m\u001b[39mself\u001b[39m\u001b[39m.\u001b[39mday\u001b[39m}\u001b[39;00m\u001b[39m\"\u001b[39m)\n\u001b[1;32m 93\u001b[0m \u001b[39mfor\u001b[39;00m i \u001b[39min\u001b[39;00m tqdm(\u001b[39mrange\u001b[39m(\u001b[39mself\u001b[39m\u001b[39m.\u001b[39mn_particles), desc\u001b[39m=\u001b[39m\u001b[39m\"\u001b[39m\u001b[39mInitializing Particles\u001b[39m\u001b[39m\"\u001b[39m):\n\u001b[0;32m---> 94\u001b[0m m \u001b[39m=\u001b[39m keras\u001b[39m.\u001b[39;49mmodels\u001b[39m.\u001b[39;49mmodel_from_json(model\u001b[39m.\u001b[39;49mto_json())\n\u001b[1;32m 95\u001b[0m init_weights \u001b[39m=\u001b[39m m\u001b[39m.\u001b[39mget_weights()\n\u001b[1;32m 97\u001b[0m w_, sh_, len_ \u001b[39m=\u001b[39m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_encode(init_weights)\n",
"File \u001b[0;32m~/miniconda3/envs/pso/lib/python3.9/site-packages/keras/saving/legacy/model_config.py:109\u001b[0m, in \u001b[0;36mmodel_from_json\u001b[0;34m(json_string, custom_objects)\u001b[0m\n\u001b[1;32m 86\u001b[0m \u001b[39m\u001b[39m\u001b[39m\"\"\"Parses a JSON model configuration string and returns a model instance.\u001b[39;00m\n\u001b[1;32m 87\u001b[0m \n\u001b[1;32m 88\u001b[0m \u001b[39mUsage:\u001b[39;00m\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 103\u001b[0m \u001b[39m A Keras model instance (uncompiled).\u001b[39;00m\n\u001b[1;32m 104\u001b[0m \u001b[39m\"\"\"\u001b[39;00m\n\u001b[1;32m 105\u001b[0m \u001b[39mfrom\u001b[39;00m \u001b[39mkeras\u001b[39;00m\u001b[39m.\u001b[39;00m\u001b[39mlayers\u001b[39;00m \u001b[39mimport\u001b[39;00m (\n\u001b[1;32m 106\u001b[0m deserialize_from_json,\n\u001b[1;32m 107\u001b[0m )\n\u001b[0;32m--> 109\u001b[0m \u001b[39mreturn\u001b[39;00m deserialize_from_json(json_string, custom_objects\u001b[39m=\u001b[39;49mcustom_objects)\n",
"File \u001b[0;32m~/miniconda3/envs/pso/lib/python3.9/site-packages/keras/layers/serialization.py:275\u001b[0m, in \u001b[0;36mdeserialize_from_json\u001b[0;34m(json_string, custom_objects)\u001b[0m\n\u001b[1;32m 269\u001b[0m populate_deserializable_objects()\n\u001b[1;32m 270\u001b[0m config \u001b[39m=\u001b[39m json_utils\u001b[39m.\u001b[39mdecode_and_deserialize(\n\u001b[1;32m 271\u001b[0m json_string,\n\u001b[1;32m 272\u001b[0m module_objects\u001b[39m=\u001b[39mLOCAL\u001b[39m.\u001b[39mALL_OBJECTS,\n\u001b[1;32m 273\u001b[0m custom_objects\u001b[39m=\u001b[39mcustom_objects,\n\u001b[1;32m 274\u001b[0m )\n\u001b[0;32m--> 275\u001b[0m \u001b[39mreturn\u001b[39;00m deserialize(config, custom_objects)\n",
"File \u001b[0;32m~/miniconda3/envs/pso/lib/python3.9/site-packages/keras/layers/serialization.py:252\u001b[0m, in \u001b[0;36mdeserialize\u001b[0;34m(config, custom_objects)\u001b[0m\n\u001b[1;32m 215\u001b[0m \u001b[39m\u001b[39m\u001b[39m\"\"\"Instantiates a layer from a config dictionary.\u001b[39;00m\n\u001b[1;32m 216\u001b[0m \n\u001b[1;32m 217\u001b[0m \u001b[39mArgs:\u001b[39;00m\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 249\u001b[0m \u001b[39m```\u001b[39;00m\n\u001b[1;32m 250\u001b[0m \u001b[39m\"\"\"\u001b[39;00m\n\u001b[1;32m 251\u001b[0m populate_deserializable_objects()\n\u001b[0;32m--> 252\u001b[0m \u001b[39mreturn\u001b[39;00m serialization\u001b[39m.\u001b[39;49mdeserialize_keras_object(\n\u001b[1;32m 253\u001b[0m config,\n\u001b[1;32m 254\u001b[0m module_objects\u001b[39m=\u001b[39;49mLOCAL\u001b[39m.\u001b[39;49mALL_OBJECTS,\n\u001b[1;32m 255\u001b[0m custom_objects\u001b[39m=\u001b[39;49mcustom_objects,\n\u001b[1;32m 256\u001b[0m printable_module_name\u001b[39m=\u001b[39;49m\u001b[39m\"\u001b[39;49m\u001b[39mlayer\u001b[39;49m\u001b[39m\"\u001b[39;49m,\n\u001b[1;32m 257\u001b[0m )\n",
"File \u001b[0;32m~/miniconda3/envs/pso/lib/python3.9/site-packages/keras/saving/legacy/serialization.py:517\u001b[0m, in \u001b[0;36mdeserialize_keras_object\u001b[0;34m(identifier, module_objects, custom_objects, printable_module_name)\u001b[0m\n\u001b[1;32m 515\u001b[0m \u001b[39mif\u001b[39;00m \u001b[39m\"\u001b[39m\u001b[39mcustom_objects\u001b[39m\u001b[39m\"\u001b[39m \u001b[39min\u001b[39;00m arg_spec\u001b[39m.\u001b[39margs:\n\u001b[1;32m 516\u001b[0m tlco \u001b[39m=\u001b[39m object_registration\u001b[39m.\u001b[39m_THREAD_LOCAL_CUSTOM_OBJECTS\u001b[39m.\u001b[39m\u001b[39m__dict__\u001b[39m\n\u001b[0;32m--> 517\u001b[0m deserialized_obj \u001b[39m=\u001b[39m \u001b[39mcls\u001b[39;49m\u001b[39m.\u001b[39;49mfrom_config(\n\u001b[1;32m 518\u001b[0m cls_config,\n\u001b[1;32m 519\u001b[0m custom_objects\u001b[39m=\u001b[39;49m{\n\u001b[1;32m 520\u001b[0m \u001b[39m*\u001b[39;49m\u001b[39m*\u001b[39;49mobject_registration\u001b[39m.\u001b[39;49m_GLOBAL_CUSTOM_OBJECTS,\n\u001b[1;32m 521\u001b[0m \u001b[39m*\u001b[39;49m\u001b[39m*\u001b[39;49mtlco,\n\u001b[1;32m 522\u001b[0m \u001b[39m*\u001b[39;49m\u001b[39m*\u001b[39;49mcustom_objects,\n\u001b[1;32m 523\u001b[0m },\n\u001b[1;32m 524\u001b[0m )\n\u001b[1;32m 525\u001b[0m \u001b[39melse\u001b[39;00m:\n\u001b[1;32m 526\u001b[0m \u001b[39mwith\u001b[39;00m object_registration\u001b[39m.\u001b[39mCustomObjectScope(custom_objects):\n",
"File \u001b[0;32m~/miniconda3/envs/pso/lib/python3.9/site-packages/keras/engine/sequential.py:481\u001b[0m, in \u001b[0;36mSequential.from_config\u001b[0;34m(cls, config, custom_objects)\u001b[0m\n\u001b[1;32m 477\u001b[0m \u001b[39mfor\u001b[39;00m layer_config \u001b[39min\u001b[39;00m layer_configs:\n\u001b[1;32m 478\u001b[0m layer \u001b[39m=\u001b[39m layer_module\u001b[39m.\u001b[39mdeserialize(\n\u001b[1;32m 479\u001b[0m layer_config, custom_objects\u001b[39m=\u001b[39mcustom_objects\n\u001b[1;32m 480\u001b[0m )\n\u001b[0;32m--> 481\u001b[0m model\u001b[39m.\u001b[39;49madd(layer)\n\u001b[1;32m 483\u001b[0m \u001b[39mif\u001b[39;00m \u001b[39mgetattr\u001b[39m(saving_lib\u001b[39m.\u001b[39m_SAVING_V3_ENABLED, \u001b[39m\"\u001b[39m\u001b[39mvalue\u001b[39m\u001b[39m\"\u001b[39m, \u001b[39mFalse\u001b[39;00m):\n\u001b[1;32m 484\u001b[0m compile_config \u001b[39m=\u001b[39m config\u001b[39m.\u001b[39mget(\u001b[39m\"\u001b[39m\u001b[39mcompile_config\u001b[39m\u001b[39m\"\u001b[39m, \u001b[39mNone\u001b[39;00m)\n",
"File \u001b[0;32m~/miniconda3/envs/pso/lib/python3.9/site-packages/tensorflow/python/trackable/base.py:205\u001b[0m, in \u001b[0;36mno_automatic_dependency_tracking.<locals>._method_wrapper\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 203\u001b[0m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_self_setattr_tracking \u001b[39m=\u001b[39m \u001b[39mFalse\u001b[39;00m \u001b[39m# pylint: disable=protected-access\u001b[39;00m\n\u001b[1;32m 204\u001b[0m \u001b[39mtry\u001b[39;00m:\n\u001b[0;32m--> 205\u001b[0m result \u001b[39m=\u001b[39m method(\u001b[39mself\u001b[39;49m, \u001b[39m*\u001b[39;49margs, \u001b[39m*\u001b[39;49m\u001b[39m*\u001b[39;49mkwargs)\n\u001b[1;32m 206\u001b[0m \u001b[39mfinally\u001b[39;00m:\n\u001b[1;32m 207\u001b[0m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_self_setattr_tracking \u001b[39m=\u001b[39m previous_value \u001b[39m# pylint: disable=protected-access\u001b[39;00m\n",
"File \u001b[0;32m~/miniconda3/envs/pso/lib/python3.9/site-packages/keras/utils/traceback_utils.py:65\u001b[0m, in \u001b[0;36mfilter_traceback.<locals>.error_handler\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m 63\u001b[0m filtered_tb \u001b[39m=\u001b[39m \u001b[39mNone\u001b[39;00m\n\u001b[1;32m 64\u001b[0m \u001b[39mtry\u001b[39;00m:\n\u001b[0;32m---> 65\u001b[0m \u001b[39mreturn\u001b[39;00m fn(\u001b[39m*\u001b[39;49margs, \u001b[39m*\u001b[39;49m\u001b[39m*\u001b[39;49mkwargs)\n\u001b[1;32m 66\u001b[0m \u001b[39mexcept\u001b[39;00m \u001b[39mException\u001b[39;00m \u001b[39mas\u001b[39;00m e:\n\u001b[1;32m 67\u001b[0m filtered_tb \u001b[39m=\u001b[39m _process_traceback_frames(e\u001b[39m.\u001b[39m__traceback__)\n",
"File \u001b[0;32m~/miniconda3/envs/pso/lib/python3.9/site-packages/keras/engine/sequential.py:237\u001b[0m, in \u001b[0;36mSequential.add\u001b[0;34m(self, layer)\u001b[0m\n\u001b[1;32m 232\u001b[0m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_has_explicit_input_shape \u001b[39m=\u001b[39m \u001b[39mTrue\u001b[39;00m\n\u001b[1;32m 234\u001b[0m \u001b[39melif\u001b[39;00m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39moutputs:\n\u001b[1;32m 235\u001b[0m \u001b[39m# If the model is being built continuously on top of an input layer:\u001b[39;00m\n\u001b[1;32m 236\u001b[0m \u001b[39m# refresh its output.\u001b[39;00m\n\u001b[0;32m--> 237\u001b[0m output_tensor \u001b[39m=\u001b[39m layer(\u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49moutputs[\u001b[39m0\u001b[39;49m])\n\u001b[1;32m 238\u001b[0m \u001b[39mif\u001b[39;00m \u001b[39mlen\u001b[39m(tf\u001b[39m.\u001b[39mnest\u001b[39m.\u001b[39mflatten(output_tensor)) \u001b[39m!=\u001b[39m \u001b[39m1\u001b[39m:\n\u001b[1;32m 239\u001b[0m \u001b[39mraise\u001b[39;00m \u001b[39mValueError\u001b[39;00m(SINGLE_LAYER_OUTPUT_ERROR_MSG)\n",
"File \u001b[0;32m~/miniconda3/envs/pso/lib/python3.9/site-packages/keras/utils/traceback_utils.py:65\u001b[0m, in \u001b[0;36mfilter_traceback.<locals>.error_handler\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m 63\u001b[0m filtered_tb \u001b[39m=\u001b[39m \u001b[39mNone\u001b[39;00m\n\u001b[1;32m 64\u001b[0m \u001b[39mtry\u001b[39;00m:\n\u001b[0;32m---> 65\u001b[0m \u001b[39mreturn\u001b[39;00m fn(\u001b[39m*\u001b[39;49margs, \u001b[39m*\u001b[39;49m\u001b[39m*\u001b[39;49mkwargs)\n\u001b[1;32m 66\u001b[0m \u001b[39mexcept\u001b[39;00m \u001b[39mException\u001b[39;00m \u001b[39mas\u001b[39;00m e:\n\u001b[1;32m 67\u001b[0m filtered_tb \u001b[39m=\u001b[39m _process_traceback_frames(e\u001b[39m.\u001b[39m__traceback__)\n",
"File \u001b[0;32m~/miniconda3/envs/pso/lib/python3.9/site-packages/keras/engine/base_layer.py:1045\u001b[0m, in \u001b[0;36mLayer.__call__\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1037\u001b[0m \u001b[39m# Functional Model construction mode is invoked when `Layer`s are called\u001b[39;00m\n\u001b[1;32m 1038\u001b[0m \u001b[39m# on symbolic `KerasTensor`s, i.e.:\u001b[39;00m\n\u001b[1;32m 1039\u001b[0m \u001b[39m# >> inputs = tf.keras.Input(10)\u001b[39;00m\n\u001b[1;32m 1040\u001b[0m \u001b[39m# >> outputs = MyLayer()(inputs) # Functional construction mode.\u001b[39;00m\n\u001b[1;32m 1041\u001b[0m \u001b[39m# >> model = tf.keras.Model(inputs, outputs)\u001b[39;00m\n\u001b[1;32m 1042\u001b[0m \u001b[39mif\u001b[39;00m _in_functional_construction_mode(\n\u001b[1;32m 1043\u001b[0m \u001b[39mself\u001b[39m, inputs, args, kwargs, input_list\n\u001b[1;32m 1044\u001b[0m ):\n\u001b[0;32m-> 1045\u001b[0m \u001b[39mreturn\u001b[39;00m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49m_functional_construction_call(\n\u001b[1;32m 1046\u001b[0m inputs, args, kwargs, input_list\n\u001b[1;32m 1047\u001b[0m )\n\u001b[1;32m 1049\u001b[0m \u001b[39m# Maintains info about the `Layer.call` stack.\u001b[39;00m\n\u001b[1;32m 1050\u001b[0m call_context \u001b[39m=\u001b[39m base_layer_utils\u001b[39m.\u001b[39mcall_context()\n",
"File \u001b[0;32m~/miniconda3/envs/pso/lib/python3.9/site-packages/keras/engine/base_layer.py:2535\u001b[0m, in \u001b[0;36mLayer._functional_construction_call\u001b[0;34m(self, inputs, args, kwargs, input_list)\u001b[0m\n\u001b[1;32m 2528\u001b[0m training_arg_passed_by_framework \u001b[39m=\u001b[39m \u001b[39mTrue\u001b[39;00m\n\u001b[1;32m 2530\u001b[0m \u001b[39mwith\u001b[39;00m call_context\u001b[39m.\u001b[39menter(\n\u001b[1;32m 2531\u001b[0m layer\u001b[39m=\u001b[39m\u001b[39mself\u001b[39m, inputs\u001b[39m=\u001b[39minputs, build_graph\u001b[39m=\u001b[39m\u001b[39mTrue\u001b[39;00m, training\u001b[39m=\u001b[39mtraining_value\n\u001b[1;32m 2532\u001b[0m ):\n\u001b[1;32m 2533\u001b[0m \u001b[39m# Check input assumptions set after layer building, e.g. input\u001b[39;00m\n\u001b[1;32m 2534\u001b[0m \u001b[39m# shape.\u001b[39;00m\n\u001b[0;32m-> 2535\u001b[0m outputs \u001b[39m=\u001b[39m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49m_keras_tensor_symbolic_call(\n\u001b[1;32m 2536\u001b[0m inputs, input_masks, args, kwargs\n\u001b[1;32m 2537\u001b[0m )\n\u001b[1;32m 2539\u001b[0m \u001b[39mif\u001b[39;00m outputs \u001b[39mis\u001b[39;00m \u001b[39mNone\u001b[39;00m:\n\u001b[1;32m 2540\u001b[0m \u001b[39mraise\u001b[39;00m \u001b[39mValueError\u001b[39;00m(\n\u001b[1;32m 2541\u001b[0m \u001b[39m\"\u001b[39m\u001b[39mA layer\u001b[39m\u001b[39m'\u001b[39m\u001b[39ms `call` method should return a \u001b[39m\u001b[39m\"\u001b[39m\n\u001b[1;32m 2542\u001b[0m \u001b[39m\"\u001b[39m\u001b[39mTensor or a list of Tensors, not None \u001b[39m\u001b[39m\"\u001b[39m\n\u001b[1;32m 2543\u001b[0m \u001b[39m\"\u001b[39m\u001b[39m(layer: \u001b[39m\u001b[39m\"\u001b[39m \u001b[39m+\u001b[39m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39mname \u001b[39m+\u001b[39m \u001b[39m\"\u001b[39m\u001b[39m).\u001b[39m\u001b[39m\"\u001b[39m\n\u001b[1;32m 2544\u001b[0m )\n",
"File \u001b[0;32m~/miniconda3/envs/pso/lib/python3.9/site-packages/keras/engine/base_layer.py:2382\u001b[0m, in \u001b[0;36mLayer._keras_tensor_symbolic_call\u001b[0;34m(self, inputs, input_masks, args, kwargs)\u001b[0m\n\u001b[1;32m 2378\u001b[0m \u001b[39mreturn\u001b[39;00m tf\u001b[39m.\u001b[39mnest\u001b[39m.\u001b[39mmap_structure(\n\u001b[1;32m 2379\u001b[0m keras_tensor\u001b[39m.\u001b[39mKerasTensor, output_signature\n\u001b[1;32m 2380\u001b[0m )\n\u001b[1;32m 2381\u001b[0m \u001b[39melse\u001b[39;00m:\n\u001b[0;32m-> 2382\u001b[0m \u001b[39mreturn\u001b[39;00m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49m_infer_output_signature(\n\u001b[1;32m 2383\u001b[0m inputs, args, kwargs, input_masks\n\u001b[1;32m 2384\u001b[0m )\n",
"File \u001b[0;32m~/miniconda3/envs/pso/lib/python3.9/site-packages/keras/engine/base_layer.py:2418\u001b[0m, in \u001b[0;36mLayer._infer_output_signature\u001b[0;34m(self, inputs, args, kwargs, input_masks)\u001b[0m\n\u001b[1;32m 2414\u001b[0m scratch_graph \u001b[39m=\u001b[39m tf\u001b[39m.\u001b[39m__internal__\u001b[39m.\u001b[39mFuncGraph(\n\u001b[1;32m 2415\u001b[0m \u001b[39mstr\u001b[39m(\u001b[39mself\u001b[39m\u001b[39m.\u001b[39mname) \u001b[39m+\u001b[39m \u001b[39m\"\u001b[39m\u001b[39m_scratch_graph\u001b[39m\u001b[39m\"\u001b[39m\n\u001b[1;32m 2416\u001b[0m )\n\u001b[1;32m 2417\u001b[0m \u001b[39mwith\u001b[39;00m scratch_graph\u001b[39m.\u001b[39mas_default():\n\u001b[0;32m-> 2418\u001b[0m inputs \u001b[39m=\u001b[39m tf\u001b[39m.\u001b[39;49mnest\u001b[39m.\u001b[39;49mmap_structure(\n\u001b[1;32m 2419\u001b[0m keras_tensor\u001b[39m.\u001b[39;49mkeras_tensor_to_placeholder, inputs\n\u001b[1;32m 2420\u001b[0m )\n\u001b[1;32m 2421\u001b[0m args \u001b[39m=\u001b[39m tf\u001b[39m.\u001b[39mnest\u001b[39m.\u001b[39mmap_structure(\n\u001b[1;32m 2422\u001b[0m keras_tensor\u001b[39m.\u001b[39mkeras_tensor_to_placeholder, args\n\u001b[1;32m 2423\u001b[0m )\n\u001b[1;32m 2424\u001b[0m kwargs \u001b[39m=\u001b[39m tf\u001b[39m.\u001b[39mnest\u001b[39m.\u001b[39mmap_structure(\n\u001b[1;32m 2425\u001b[0m keras_tensor\u001b[39m.\u001b[39mkeras_tensor_to_placeholder, kwargs\n\u001b[1;32m 2426\u001b[0m )\n",
"File \u001b[0;32m~/miniconda3/envs/pso/lib/python3.9/site-packages/tensorflow/python/util/nest.py:917\u001b[0m, in \u001b[0;36mmap_structure\u001b[0;34m(func, *structure, **kwargs)\u001b[0m\n\u001b[1;32m 913\u001b[0m flat_structure \u001b[39m=\u001b[39m (flatten(s, expand_composites) \u001b[39mfor\u001b[39;00m s \u001b[39min\u001b[39;00m structure)\n\u001b[1;32m 914\u001b[0m entries \u001b[39m=\u001b[39m \u001b[39mzip\u001b[39m(\u001b[39m*\u001b[39mflat_structure)\n\u001b[1;32m 916\u001b[0m \u001b[39mreturn\u001b[39;00m pack_sequence_as(\n\u001b[0;32m--> 917\u001b[0m structure[\u001b[39m0\u001b[39m], [func(\u001b[39m*\u001b[39mx) \u001b[39mfor\u001b[39;00m x \u001b[39min\u001b[39;00m entries],\n\u001b[1;32m 918\u001b[0m expand_composites\u001b[39m=\u001b[39mexpand_composites)\n",
"File \u001b[0;32m~/miniconda3/envs/pso/lib/python3.9/site-packages/tensorflow/python/util/nest.py:917\u001b[0m, in \u001b[0;36m<listcomp>\u001b[0;34m(.0)\u001b[0m\n\u001b[1;32m 913\u001b[0m flat_structure \u001b[39m=\u001b[39m (flatten(s, expand_composites) \u001b[39mfor\u001b[39;00m s \u001b[39min\u001b[39;00m structure)\n\u001b[1;32m 914\u001b[0m entries \u001b[39m=\u001b[39m \u001b[39mzip\u001b[39m(\u001b[39m*\u001b[39mflat_structure)\n\u001b[1;32m 916\u001b[0m \u001b[39mreturn\u001b[39;00m pack_sequence_as(\n\u001b[0;32m--> 917\u001b[0m structure[\u001b[39m0\u001b[39m], [func(\u001b[39m*\u001b[39;49mx) \u001b[39mfor\u001b[39;00m x \u001b[39min\u001b[39;00m entries],\n\u001b[1;32m 918\u001b[0m expand_composites\u001b[39m=\u001b[39mexpand_composites)\n",
"File \u001b[0;32m~/miniconda3/envs/pso/lib/python3.9/site-packages/keras/engine/keras_tensor.py:648\u001b[0m, in \u001b[0;36mkeras_tensor_to_placeholder\u001b[0;34m(x)\u001b[0m\n\u001b[1;32m 646\u001b[0m \u001b[39m\u001b[39m\u001b[39m\"\"\"Construct a graph placeholder to represent a KerasTensor when tracing.\"\"\"\u001b[39;00m\n\u001b[1;32m 647\u001b[0m \u001b[39mif\u001b[39;00m \u001b[39misinstance\u001b[39m(x, KerasTensor):\n\u001b[0;32m--> 648\u001b[0m \u001b[39mreturn\u001b[39;00m x\u001b[39m.\u001b[39;49m_to_placeholder()\n\u001b[1;32m 649\u001b[0m \u001b[39melse\u001b[39;00m:\n\u001b[1;32m 650\u001b[0m \u001b[39mreturn\u001b[39;00m x\n",
"File \u001b[0;32m~/miniconda3/envs/pso/lib/python3.9/site-packages/keras/engine/keras_tensor.py:236\u001b[0m, in \u001b[0;36mKerasTensor._to_placeholder\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 233\u001b[0m \u001b[39mdef\u001b[39;00m \u001b[39mcomponent_to_placeholder\u001b[39m(component):\n\u001b[1;32m 234\u001b[0m \u001b[39mreturn\u001b[39;00m tf\u001b[39m.\u001b[39mcompat\u001b[39m.\u001b[39mv1\u001b[39m.\u001b[39mplaceholder(component\u001b[39m.\u001b[39mdtype, component\u001b[39m.\u001b[39mshape)\n\u001b[0;32m--> 236\u001b[0m \u001b[39mreturn\u001b[39;00m tf\u001b[39m.\u001b[39;49mnest\u001b[39m.\u001b[39;49mmap_structure(\n\u001b[1;32m 237\u001b[0m component_to_placeholder, \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49mtype_spec, expand_composites\u001b[39m=\u001b[39;49m\u001b[39mTrue\u001b[39;49;00m\n\u001b[1;32m 238\u001b[0m )\n",
"File \u001b[0;32m~/miniconda3/envs/pso/lib/python3.9/site-packages/tensorflow/python/util/nest.py:917\u001b[0m, in \u001b[0;36mmap_structure\u001b[0;34m(func, *structure, **kwargs)\u001b[0m\n\u001b[1;32m 913\u001b[0m flat_structure \u001b[39m=\u001b[39m (flatten(s, expand_composites) \u001b[39mfor\u001b[39;00m s \u001b[39min\u001b[39;00m structure)\n\u001b[1;32m 914\u001b[0m entries \u001b[39m=\u001b[39m \u001b[39mzip\u001b[39m(\u001b[39m*\u001b[39mflat_structure)\n\u001b[1;32m 916\u001b[0m \u001b[39mreturn\u001b[39;00m pack_sequence_as(\n\u001b[0;32m--> 917\u001b[0m structure[\u001b[39m0\u001b[39m], [func(\u001b[39m*\u001b[39mx) \u001b[39mfor\u001b[39;00m x \u001b[39min\u001b[39;00m entries],\n\u001b[1;32m 918\u001b[0m expand_composites\u001b[39m=\u001b[39mexpand_composites)\n",
"File \u001b[0;32m~/miniconda3/envs/pso/lib/python3.9/site-packages/tensorflow/python/util/nest.py:917\u001b[0m, in \u001b[0;36m<listcomp>\u001b[0;34m(.0)\u001b[0m\n\u001b[1;32m 913\u001b[0m flat_structure \u001b[39m=\u001b[39m (flatten(s, expand_composites) \u001b[39mfor\u001b[39;00m s \u001b[39min\u001b[39;00m structure)\n\u001b[1;32m 914\u001b[0m entries \u001b[39m=\u001b[39m \u001b[39mzip\u001b[39m(\u001b[39m*\u001b[39mflat_structure)\n\u001b[1;32m 916\u001b[0m \u001b[39mreturn\u001b[39;00m pack_sequence_as(\n\u001b[0;32m--> 917\u001b[0m structure[\u001b[39m0\u001b[39m], [func(\u001b[39m*\u001b[39;49mx) \u001b[39mfor\u001b[39;00m x \u001b[39min\u001b[39;00m entries],\n\u001b[1;32m 918\u001b[0m expand_composites\u001b[39m=\u001b[39mexpand_composites)\n",
"File \u001b[0;32m~/miniconda3/envs/pso/lib/python3.9/site-packages/keras/engine/keras_tensor.py:234\u001b[0m, in \u001b[0;36mKerasTensor._to_placeholder.<locals>.component_to_placeholder\u001b[0;34m(component)\u001b[0m\n\u001b[1;32m 233\u001b[0m \u001b[39mdef\u001b[39;00m \u001b[39mcomponent_to_placeholder\u001b[39m(component):\n\u001b[0;32m--> 234\u001b[0m \u001b[39mreturn\u001b[39;00m tf\u001b[39m.\u001b[39;49mcompat\u001b[39m.\u001b[39;49mv1\u001b[39m.\u001b[39;49mplaceholder(component\u001b[39m.\u001b[39;49mdtype, component\u001b[39m.\u001b[39;49mshape)\n",
"File \u001b[0;32m~/miniconda3/envs/pso/lib/python3.9/site-packages/tensorflow/python/ops/array_ops.py:3343\u001b[0m, in \u001b[0;36mplaceholder\u001b[0;34m(dtype, shape, name)\u001b[0m\n\u001b[1;32m 3339\u001b[0m \u001b[39mif\u001b[39;00m context\u001b[39m.\u001b[39mexecuting_eagerly():\n\u001b[1;32m 3340\u001b[0m \u001b[39mraise\u001b[39;00m \u001b[39mRuntimeError\u001b[39;00m(\u001b[39m\"\u001b[39m\u001b[39mtf.placeholder() is not compatible with \u001b[39m\u001b[39m\"\u001b[39m\n\u001b[1;32m 3341\u001b[0m \u001b[39m\"\u001b[39m\u001b[39meager execution.\u001b[39m\u001b[39m\"\u001b[39m)\n\u001b[0;32m-> 3343\u001b[0m \u001b[39mreturn\u001b[39;00m gen_array_ops\u001b[39m.\u001b[39;49mplaceholder(dtype\u001b[39m=\u001b[39;49mdtype, shape\u001b[39m=\u001b[39;49mshape, name\u001b[39m=\u001b[39;49mname)\n",
"File \u001b[0;32m~/miniconda3/envs/pso/lib/python3.9/site-packages/tensorflow/python/ops/gen_array_ops.py:6898\u001b[0m, in \u001b[0;36mplaceholder\u001b[0;34m(dtype, shape, name)\u001b[0m\n\u001b[1;32m 6896\u001b[0m shape \u001b[39m=\u001b[39m \u001b[39mNone\u001b[39;00m\n\u001b[1;32m 6897\u001b[0m shape \u001b[39m=\u001b[39m _execute\u001b[39m.\u001b[39mmake_shape(shape, \u001b[39m\"\u001b[39m\u001b[39mshape\u001b[39m\u001b[39m\"\u001b[39m)\n\u001b[0;32m-> 6898\u001b[0m _, _, _op, _outputs \u001b[39m=\u001b[39m _op_def_library\u001b[39m.\u001b[39;49m_apply_op_helper(\n\u001b[1;32m 6899\u001b[0m \u001b[39m\"\u001b[39;49m\u001b[39mPlaceholder\u001b[39;49m\u001b[39m\"\u001b[39;49m, dtype\u001b[39m=\u001b[39;49mdtype, shape\u001b[39m=\u001b[39;49mshape, name\u001b[39m=\u001b[39;49mname)\n\u001b[1;32m 6900\u001b[0m _result \u001b[39m=\u001b[39m _outputs[:]\n\u001b[1;32m 6901\u001b[0m \u001b[39mif\u001b[39;00m _execute\u001b[39m.\u001b[39mmust_record_gradient():\n",
"File \u001b[0;32m~/miniconda3/envs/pso/lib/python3.9/site-packages/tensorflow/python/framework/op_def_library.py:795\u001b[0m, in \u001b[0;36m_apply_op_helper\u001b[0;34m(op_type_name, name, **keywords)\u001b[0m\n\u001b[1;32m 790\u001b[0m must_colocate_inputs \u001b[39m=\u001b[39m [val \u001b[39mfor\u001b[39;00m arg, val \u001b[39min\u001b[39;00m \u001b[39mzip\u001b[39m(op_def\u001b[39m.\u001b[39minput_arg, inputs)\n\u001b[1;32m 791\u001b[0m \u001b[39mif\u001b[39;00m arg\u001b[39m.\u001b[39mis_ref]\n\u001b[1;32m 792\u001b[0m \u001b[39mwith\u001b[39;00m _MaybeColocateWith(must_colocate_inputs):\n\u001b[1;32m 793\u001b[0m \u001b[39m# Add Op to graph\u001b[39;00m\n\u001b[1;32m 794\u001b[0m \u001b[39m# pylint: disable=protected-access\u001b[39;00m\n\u001b[0;32m--> 795\u001b[0m op \u001b[39m=\u001b[39m g\u001b[39m.\u001b[39;49m_create_op_internal(op_type_name, inputs, dtypes\u001b[39m=\u001b[39;49m\u001b[39mNone\u001b[39;49;00m,\n\u001b[1;32m 796\u001b[0m name\u001b[39m=\u001b[39;49mscope, input_types\u001b[39m=\u001b[39;49minput_types,\n\u001b[1;32m 797\u001b[0m attrs\u001b[39m=\u001b[39;49mattr_protos, op_def\u001b[39m=\u001b[39;49mop_def)\n\u001b[1;32m 799\u001b[0m \u001b[39m# `outputs` is returned as a separate return value so that the output\u001b[39;00m\n\u001b[1;32m 800\u001b[0m \u001b[39m# tensors can the `op` per se can be decoupled so that the\u001b[39;00m\n\u001b[1;32m 801\u001b[0m \u001b[39m# `op_callbacks` can function properly. See framework/op_callbacks.py\u001b[39;00m\n\u001b[1;32m 802\u001b[0m \u001b[39m# for more details.\u001b[39;00m\n\u001b[1;32m 803\u001b[0m outputs \u001b[39m=\u001b[39m op\u001b[39m.\u001b[39moutputs\n",
"File \u001b[0;32m~/miniconda3/envs/pso/lib/python3.9/site-packages/tensorflow/python/framework/func_graph.py:749\u001b[0m, in \u001b[0;36mFuncGraph._create_op_internal\u001b[0;34m(self, op_type, inputs, dtypes, input_types, name, attrs, op_def, compute_device)\u001b[0m\n\u001b[1;32m 747\u001b[0m inp \u001b[39m=\u001b[39m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39mcapture(inp)\n\u001b[1;32m 748\u001b[0m captured_inputs\u001b[39m.\u001b[39mappend(inp)\n\u001b[0;32m--> 749\u001b[0m \u001b[39mreturn\u001b[39;00m \u001b[39msuper\u001b[39;49m(FuncGraph, \u001b[39mself\u001b[39;49m)\u001b[39m.\u001b[39;49m_create_op_internal( \u001b[39m# pylint: disable=protected-access\u001b[39;49;00m\n\u001b[1;32m 750\u001b[0m op_type, captured_inputs, dtypes, input_types, name, attrs, op_def,\n\u001b[1;32m 751\u001b[0m compute_device)\n",
"File \u001b[0;32m~/miniconda3/envs/pso/lib/python3.9/site-packages/tensorflow/python/framework/ops.py:3798\u001b[0m, in \u001b[0;36mGraph._create_op_internal\u001b[0;34m(self, op_type, inputs, dtypes, input_types, name, attrs, op_def, compute_device)\u001b[0m\n\u001b[1;32m 3795\u001b[0m \u001b[39m# _create_op_helper mutates the new Operation. `_mutation_lock` ensures a\u001b[39;00m\n\u001b[1;32m 3796\u001b[0m \u001b[39m# Session.run call cannot occur between creating and mutating the op.\u001b[39;00m\n\u001b[1;32m 3797\u001b[0m \u001b[39mwith\u001b[39;00m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_mutation_lock():\n\u001b[0;32m-> 3798\u001b[0m ret \u001b[39m=\u001b[39m Operation(\n\u001b[1;32m 3799\u001b[0m node_def,\n\u001b[1;32m 3800\u001b[0m \u001b[39mself\u001b[39;49m,\n\u001b[1;32m 3801\u001b[0m inputs\u001b[39m=\u001b[39;49minputs,\n\u001b[1;32m 3802\u001b[0m output_types\u001b[39m=\u001b[39;49mdtypes,\n\u001b[1;32m 3803\u001b[0m control_inputs\u001b[39m=\u001b[39;49mcontrol_inputs,\n\u001b[1;32m 3804\u001b[0m input_types\u001b[39m=\u001b[39;49minput_types,\n\u001b[1;32m 3805\u001b[0m original_op\u001b[39m=\u001b[39;49m\u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49m_default_original_op,\n\u001b[1;32m 3806\u001b[0m op_def\u001b[39m=\u001b[39;49mop_def)\n\u001b[1;32m 3807\u001b[0m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_create_op_helper(ret, compute_device\u001b[39m=\u001b[39mcompute_device)\n\u001b[1;32m 3808\u001b[0m \u001b[39mreturn\u001b[39;00m ret\n",
"File \u001b[0;32m~/miniconda3/envs/pso/lib/python3.9/site-packages/tensorflow/python/framework/ops.py:2085\u001b[0m, in \u001b[0;36mOperation.__init__\u001b[0;34m(***failed resolving arguments***)\u001b[0m\n\u001b[1;32m 2082\u001b[0m input_types \u001b[39m=\u001b[39m [i\u001b[39m.\u001b[39mdtype\u001b[39m.\u001b[39mbase_dtype \u001b[39mfor\u001b[39;00m i \u001b[39min\u001b[39;00m inputs]\n\u001b[1;32m 2083\u001b[0m \u001b[39melse\u001b[39;00m:\n\u001b[1;32m 2084\u001b[0m \u001b[39mif\u001b[39;00m \u001b[39mnot\u001b[39;00m \u001b[39mall\u001b[39m(\n\u001b[0;32m-> 2085\u001b[0m x\u001b[39m.\u001b[39mis_compatible_with(i\u001b[39m.\u001b[39mdtype) \u001b[39mfor\u001b[39;00m i, x \u001b[39min\u001b[39;00m \u001b[39mzip\u001b[39;49m(inputs, input_types)):\n\u001b[1;32m 2086\u001b[0m \u001b[39mraise\u001b[39;00m \u001b[39mTypeError\u001b[39;00m(\u001b[39m\"\u001b[39m\u001b[39mIn op \u001b[39m\u001b[39m'\u001b[39m\u001b[39m%s\u001b[39;00m\u001b[39m'\u001b[39m\u001b[39m, input types (\u001b[39m\u001b[39m%s\u001b[39;00m\u001b[39m) are not compatible \u001b[39m\u001b[39m\"\u001b[39m\n\u001b[1;32m 2087\u001b[0m \u001b[39m\"\u001b[39m\u001b[39mwith expected types (\u001b[39m\u001b[39m%s\u001b[39;00m\u001b[39m)\u001b[39m\u001b[39m\"\u001b[39m \u001b[39m%\u001b[39m\n\u001b[1;32m 2088\u001b[0m (node_def\u001b[39m.\u001b[39mname, [i\u001b[39m.\u001b[39mdtype \u001b[39mfor\u001b[39;00m i \u001b[39min\u001b[39;00m inputs], input_types))\n\u001b[1;32m 2090\u001b[0m \u001b[39m# Build the list of control inputs.\u001b[39;00m\n",
"\u001b[0;31mKeyboardInterrupt\u001b[0m: "
]
}
],
"source": [
"%load_ext memory_profiler\n",
"import linecache\n",
"import os\n",
"import tracemalloc\n",
"\n",
"\n",
"def display_top(snapshot, key_type=\"lineno\", limit=10):\n",
" snapshot = snapshot.filter_traces(\n",
" (\n",
" tracemalloc.Filter(False, \"<frozen importlib._bootstrap>\"),\n",
" tracemalloc.Filter(False, \"<unknown>\"),\n",
" )\n",
" )\n",
" top_stats = snapshot.statistics(key_type)\n",
"\n",
" print(\"Top %s lines\" % limit)\n",
" for index, stat in enumerate(top_stats[:limit], 1):\n",
" frame = stat.traceback[0]\n",
" print(\n",
" \"#%s: %s:%s: %.1f KiB\"\n",
" % (index, frame.filename, frame.lineno, stat.size / 1024)\n",
" )\n",
" line = linecache.getline(frame.filename, frame.lineno).strip()\n",
" if line:\n",
" print(\" %s\" % line)\n",
"\n",
" other = top_stats[limit:]\n",
" if other:\n",
" size = sum(stat.size for stat in other)\n",
" print(\"%s other: %.1f KiB\" % (len(other), size / 1024))\n",
" total = sum(stat.size for stat in top_stats)\n",
" print(\"Total allocated size: %.1f KiB\" % (total / 1024))\n",
"\n",
"\n",
"tracemalloc.start()\n",
"\n",
"model = make_model()\n",
"x_train, y_train = get_data_test()\n",
"\n",
"loss = [\n",
" \"mean_squared_error\",\n",
" \"categorical_crossentropy\",\n",
" \"sparse_categorical_crossentropy\",\n",
" \"binary_crossentropy\",\n",
" \"kullback_leibler_divergence\",\n",
" \"poisson\",\n",
" \"cosine_similarity\",\n",
" \"log_cosh\",\n",
" \"huber_loss\",\n",
" \"mean_absolute_error\",\n",
" \"mean_absolute_percentage_error\",\n",
"]\n",
"\n",
"\n",
"pso_mnist = Optimizer(\n",
" model,\n",
" loss=loss[0],\n",
" n_particles=70,\n",
" c0=0.3,\n",
" c1=0.5,\n",
" w_min=0.4,\n",
" w_max=0.7,\n",
" negative_swarm=0.1,\n",
" mutation_swarm=0.2,\n",
" particle_min=-5,\n",
" particle_max=5,\n",
")\n",
"\n",
"best_score = pso_mnist.fit(\n",
" x_train,\n",
" y_train,\n",
" epochs=200,\n",
" save_info=True,\n",
" log=2,\n",
" save_path=\"./result/mnist\",\n",
" renewal=\"acc\",\n",
" check_point=25,\n",
")\n",
"\n",
"print(\"Done!\")\n",
"\n",
"snapshot = tracemalloc.take_snapshot()\n",
"display_top(snapshot)\n",
"\n",
"%memit\n",
"gc.collect()\n",
"sys.exit(0)\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "pso",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.16"
},
"orig_nbformat": 4
},
"nbformat": 4,
"nbformat_minor": 2
}

View File

@@ -80,11 +80,11 @@ loss = [
 pso_mnist = Optimizer(
     model,
     loss=loss[0],
-    n_particles=150,
-    c0=0.2,
-    c1=0.35,
-    w_min=0.25,
-    w_max=0.5,
+    n_particles=70,
+    c0=0.3,
+    c1=0.5,
+    w_min=0.4,
+    w_max=0.7,
     negative_swarm=0.1,
     mutation_swarm=0.2,
     particle_min=-5,
@@ -94,14 +94,16 @@ pso_mnist = Optimizer(
 best_score = pso_mnist.fit(
     x_train,
     y_train,
-    epochs=100,
+    epochs=200,
     save_info=True,
     log=2,
+    log_name="mnist",
     save_path="./result/mnist",
     renewal="acc",
     check_point=25,
 )
 print("Done!")
 gc.collect()
 sys.exit(0)

View File

@@ -197,7 +197,7 @@
"name": "python", "name": "python",
"nbconvert_exporter": "python", "nbconvert_exporter": "python",
"pygments_lexer": "ipython3", "pygments_lexer": "ipython3",
"version": "3.8.16" "version": "3.9.16"
}, },
"widgets": { "widgets": {
"application/vnd.jupyter.widget-state+json": { "application/vnd.jupyter.widget-state+json": {

View File

@@ -1,7 +1,7 @@
 from .optimizer import Optimizer
 from .particle import Particle
-__version__ = "0.1.0"
+__version__ = "0.1.3"
 __all__ = [
     "Optimizer",

View File

@@ -7,7 +7,7 @@ from datetime import datetime
 import numpy as np
 import tensorflow as tf
 from tensorflow import keras
-from tqdm import tqdm
+from tqdm.auto import tqdm
 from .particle import Particle
@@ -89,6 +89,7 @@ class Optimizer:
         self.empirical_balance = False
         negative_count = 0
+        print(f"start running time : {self.day}")
         for i in tqdm(range(self.n_particles), desc="Initializing Particles"):
             m = keras.models.model_from_json(model.to_json())
             init_weights = m.get_weights()
@@ -206,6 +207,7 @@ class Optimizer:
         y,
         epochs: int = 100,
         log: int = 0,
+        log_name: str = None,
         save_info: bool = False,
         save_path: str = "./result",
         renewal: str = "acc",
@@ -231,13 +233,19 @@ class Optimizer:
         self.Dispersion = Dispersion
         self.renewal = renewal
+        try:
-        if log == 2:
-            train_log_dir = "logs/gradient_tape/" + self.day + "/train"
+            if log == 2:
+                assert log_name is not None, "log_name is None"
+                train_log_dir = f"logs/{log_name}/{self.day}/train"
                 self.train_summary_writer = [None] * self.n_particles
                 for i in range(self.n_particles):
                     self.train_summary_writer[i] = tf.summary.create_file_writer(
                         train_log_dir + f"/{i}"
                     )
+        except AssertionError as e:
+            print(e)
+            sys.exit(1)
         try:
             if check_point is not None or log == 1:
                 if save_path is None:
@@ -289,7 +297,10 @@ class Optimizer:
f.write(", ") f.write(", ")
else: else:
f.write("\n") f.write("\n")
if log == 2:
with self.train_summary_writer[i].as_default():
tf.summary.scalar("loss", local_score[0], step=0)
tf.summary.scalar("accuracy", local_score[1], step=0)
del local_score del local_score
gc.collect() gc.collect()
@@ -302,18 +313,19 @@ class Optimizer:
             range(epochs),
             desc=f"best {self.g_best_score[0]:.4f}|{self.g_best_score[1]:.4f}",
             ascii=True,
-            leave=False,
+            leave=True,
+            position=0,
         )
         for epoch in epochs_pbar:
-            acc = 0
-            loss = 0
             max_score = 0
             min_loss = np.inf
             part_pbar = tqdm(
                 range(len(self.particles)),
                 desc=f"acc : {max_score:.4f} loss : {min_loss:.4f}",
                 ascii=True,
                 leave=False,
+                position=1,
             )
             w = self.w_max - (self.w_max - self.w_min) * epoch / epochs
             for i in part_pbar:
@@ -371,8 +383,8 @@ class Optimizer:
                 if log == 2:
                     with self.train_summary_writer[i].as_default():
-                        tf.summary.scalar("loss", score[0], step=epoch)
-                        tf.summary.scalar("accuracy", score[1], step=epoch)
+                        tf.summary.scalar("loss", score[0], step=epoch + 1)
+                        tf.summary.scalar("accuracy", score[1], step=epoch + 1)
                 if renewal == "acc":
                     if score[1] >= max_score:
@@ -424,14 +436,6 @@ class Optimizer:
f"best {self.g_best_score[0]:.4f} | {self.g_best_score[1]:.4f}" f"best {self.g_best_score[0]:.4f} | {self.g_best_score[1]:.4f}"
) )
if score[0] == None:
score[0] = np.inf
if score[1] == None:
score[1] = 0
loss = loss + score[0]
acc = acc + score[1]
if log == 1: if log == 1:
with open( with open(
f"./{save_path}/{self.day}_{self.n_particles}_{epochs}_{self.c0}_{self.c1}_{self.w_min}_{renewal}.csv", f"./{save_path}/{self.day}_{self.n_particles}_{epochs}_{self.c0}_{self.c1}_{self.w_min}_{renewal}.csv",
@@ -443,11 +447,12 @@ class Optimizer:
                     else:
                         f.write("\n")
+            part_pbar.refresh()
             if check_point is not None:
                 if epoch % check_point == 0:
                     os.makedirs(f"./{save_path}/{self.day}", exist_ok=True)
                     self._check_point_save(f"./{save_path}/{self.day}/ckpt-{epoch}")
-            self.avg_score = acc / self.n_particles
             gc.collect()
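
For context, a self-contained sketch (not the library code itself) of the per-particle TensorBoard pattern the diff above introduces: one file writer per particle, grouped under `logs/<log_name>/<day>/train/<index>`. The particle count and scores here are made-up stand-ins:

```python
# Standalone sketch of per-particle TensorBoard logging: one writer per
# particle so each particle shows up as its own run in TensorBoard.
import datetime
import numpy as np
import tensorflow as tf

log_name = "demo"    # stands in for the new log_name argument
day = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
train_log_dir = f"logs/{log_name}/{day}/train"
n_particles = 3      # made-up particle count

writers = [
    tf.summary.create_file_writer(f"{train_log_dir}/{i}")
    for i in range(n_particles)
]

for epoch in range(5):                             # toy training loop
    for i, writer in enumerate(writers):
        loss = float(np.exp(-epoch) + 0.1 * i)     # fake per-particle score
        with writer.as_default():
            tf.summary.scalar("loss", loss, step=epoch + 1)
            writer.flush()
```

This grouping also explains the `step=epoch + 1` change above: step 0 is now taken by the initial scores written right after particle initialization.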

View File

@@ -153,7 +153,7 @@ class Particle:
         )
         if np.random.rand() < self.mutation:
-            m_v = np.random.uniform(-0.1, 0.1, len(encode_v))
+            m_v = np.random.uniform(-0.2, 0.2, len(encode_v))
             new_v = m_v
         self.velocities = self._decode(new_v, w_sh, w_len)
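
The only change here widens the mutation noise. A toy numpy sketch of that velocity-mutation step, with a made-up velocity vector and mutation rate:

```python
# Toy illustration (not the Particle class itself) of the mutation step:
# with probability `mutation`, the flattened velocity vector is replaced
# by fresh uniform noise, now drawn from [-0.2, 0.2] instead of [-0.1, 0.1].
import numpy as np

mutation = 0.2                  # made-up mutation probability
encode_v = np.zeros(10)         # stands in for the encoded velocities

if np.random.rand() < mutation:
    m_v = np.random.uniform(-0.2, 0.2, len(encode_v))
    encode_v = m_v              # wider noise -> stronger perturbation
```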

View File

@@ -3,5 +3,5 @@ keras==2.11.0
 matplotlib @ file:///croot/matplotlib-suite_1679593461707/work
 numpy @ file:///work/mkl/numpy_and_numpy_base_1682953417311/work
 pandas==1.5.3
-tensorflow==2.11.0
+tensorflow==2.11.1
 tqdm @ file:///croot/tqdm_1679561862951/work

View File

@@ -1,16 +1,20 @@
 from setuptools import setup, find_packages
+import pso
+VERSION = pso.__version__
 setup(
     name="pso2keras",
-    version="0.1.2",
+    version=VERSION,
     description="Particle Swarm Optimization to tensorflow package",
     author="pieroot",
     author_email="jgbong0306@gmail.com",
     url="https://github.com/jung-geun/PSO",
     install_requires=[
         "tqdm==4.65.0",
-        "tensorflow==2.11.0",
-        "keras==2.11.0",
+        "tensorflow==2.11.1",
+        "tensorboard==2.11.2",
         "numpy",
         "pandas",
         "ipython",

xor.py
View File

@@ -64,11 +64,10 @@ best_score = pso_xor.fit(
     x_test,
     y_test,
     epochs=200,
-    save=True,
+    save_info=True,
+    log=2,
     save_path="./result/xor",
     renewal="acc",
-    empirical_balance=False,
-    Dispersion=False,
     check_point=25,
 )
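
Once a run has written events under `logs/<log_name>`, they can be viewed with `tensorboard --logdir logs/<log_name>`. (Note that this xor.py call passes `log=2` without a `log_name`, which the new assertion in optimizer.py will reject.) A programmatic equivalent of the CLI, assuming the `tensorboard` package pinned in setup.py above:

```python
# Sketch: start TensorBoard from Python on a run directory written by
# fit(log=2, log_name="xor"). Equivalent to `tensorboard --logdir logs/xor`.
from tensorboard import program

tb = program.TensorBoard()
tb.configure(argv=[None, "--logdir", "logs/xor"])
url = tb.launch()   # runs in a background thread
print(f"TensorBoard listening on {url}")
```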