# 0. Import required packages
import numpy as np
from keras.models import Sequential
from keras.layers import Dense

# Fix the NumPy random seed for reproducibility
np.random.seed(5)

# 1. Load the Pima Indians diabetes dataset (8 features + 1 label per row)
dataset = np.loadtxt("c:/users/pc366/Desktop/pima-indians-diabetes.csv", delimiter=",")

# 2. Split: first 700 rows for training, the remainder held out for testing
split = 700
x_train, y_train = dataset[:split, 0:8], dataset[:split, 8]
x_test, y_test = dataset[split:, 0:8], dataset[split:, 8]

# 3. Build the network: 8 inputs -> 12 -> 8 -> 1 (sigmoid for a binary label)
model = Sequential()
model.add(Dense(12, input_dim=8, activation='relu'))
model.add(Dense(8, activation='relu'))
model.add(Dense(1, activation='sigmoid'))

#from IPython.display import SVG
#from keras.utils.vis_utils import model_to_dot
#%matplotlib inline
#SVG(model_to_dot(model, show_shapes=True,dpi=60).create(prog='dot', format='svg'))

# Save the model (architecture + current, still-untrained weights) to disk
from keras.models import load_model
model.save('diabetes_model.h5')
# Model summary
_________________________________________________________________
Layer (type)                 Output Shape              Param #
=================================================================
dense_33 (Dense)             (None, 12)                108
dense_34 (Dense)             (None, 8)                 104
dense_35 (Dense)             (None, 1)                 9
=================================================================
Total params: 221
Trainable params: 221
Non-trainable params: 0
# Model plotting raised an AttributeError,
# so Netron was used to visualize the model instead.
=============================================================
# 0. Import required packages
import numpy as np
from keras.models import Sequential
from keras.layers import Dense

# Fix the NumPy random seed so the data handling is reproducible
# (NOTE: Keras/TensorFlow keep their own seeds, so training may still vary)
np.random.seed(5)

# 1. Load the Pima Indians diabetes dataset (8 features + 1 label per row)
dataset = np.loadtxt("c:/users/pc366/Desktop/pima-indians-diabetes.csv", delimiter=",")

# 2. Split: first 700 rows for training, the remainder held out for testing
split = 700
x_train, y_train = dataset[:split, 0:8], dataset[:split, 8]
x_test, y_test = dataset[split:, 0:8], dataset[split:, 8]

# 3. Build the network: 8 inputs -> 12 -> 8 -> 1 (sigmoid for a binary label)
model = Sequential()
model.add(Dense(12, input_dim=8, activation='relu'))
model.add(Dense(8, activation='relu'))
model.add(Dense(1, activation='sigmoid'))

# 4. Configure training: binary cross-entropy loss, Adam optimizer
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])

# 5. Train on the first 700 rows
model.fit(x_train, y_train, epochs=1500, batch_size=64)

# 6. Evaluate on the held-out rows and report accuracy as a percentage
scores = model.evaluate(x_test, y_test)
print("%s: %.2f%%" %(model.metrics_names[1], scores[1]*100))
Epoch 1495/1500
11/11 [==============================] - 0s 801us/step - loss: 0.3824 - accuracy: 0.8214
Epoch 1496/1500
11/11 [==============================] - 0s 806us/step - loss: 0.3851 - accuracy: 0.8100
Epoch 1497/1500
11/11 [==============================] - 0s 841us/step - loss: 0.3816 - accuracy: 0.8114
Epoch 1498/1500
11/11 [==============================] - 0s 822us/step - loss: 0.3821 - accuracy: 0.8114
Epoch 1499/1500
11/11 [==============================] - 0s 806us/step - loss: 0.3798 - accuracy: 0.8171
Epoch 1500/1500
11/11 [==============================] - 0s 898us/step - loss: 0.3825 - accuracy: 0.8157
3/3 [==============================] - 0s 8ms/step - loss: 0.5963 - accuracy: 0.7500
accuracy: 75.00%