1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
|
import numpy as np
import tensorflow as tf
from tensorflow.keras.models import Sequential, Model
from tensorflow.keras.layers import Input, Flatten, Dense, Dropout, concatenate, LSTM
import matplotlib.pyplot as plt
# Keras functional-API model-building examples
def make_model():
    """Build a small fully connected binary classifier with the functional API.

    Returns:
        A Keras ``Model`` mapping an 8-feature input through three ReLU
        Dense layers (16 -> 32 -> 64) to a single sigmoid unit.
    """
    # `inputs` rather than `input`: avoids shadowing the Python builtin.
    inputs = Input(shape=(8,))  # Input() declares the expected feature size
    x = Dense(16, activation='relu')(inputs)
    x = Dense(32, activation='relu')(x)
    x = Dense(64, activation='relu')(x)
    output = Dense(1, activation='sigmoid')(x)
    # Model() ties the declared input tensor to the final output tensor.
    return Model(inputs=inputs, outputs=output)
def make_model_2():
    """Build a two-branch regression model with the functional API.

    A 64-dim input and a 128-dim input are processed by separate Dense
    stacks, concatenated, and reduced to one linear output.

    Returns:
        A Keras ``Model`` taking ``[input_64, input_128]`` and producing
        a single linear value.
    """
    branch_a_in = Input(shape=(64,))
    branch_b_in = Input(shape=(128,))

    # Branch A: 64 -> 16 -> 8
    a = Dense(16, activation="relu")(branch_a_in)
    a = Dense(8, activation="relu")(a)

    # Branch B: 128 -> 64 -> 32 -> 8
    b = Dense(64, activation="relu")(branch_b_in)
    b = Dense(32, activation="relu")(b)
    b = Dense(8, activation="relu")(b)

    # Merge the two 8-dim branch outputs and regress to one value.
    merged = concatenate([a, b])
    prediction = Dense(1, activation="linear")(merged)
    return Model(inputs=[branch_a_in, branch_b_in], outputs=prediction)
def make_model3():
    """Build a sequence regressor: 50-step univariate series -> LSTM -> Dense.

    Returns:
        A Keras ``Model`` taking input of shape ``(50, 1)`` through an
        LSTM(10), a ReLU Dense(10), and a single linear output unit.
    """
    # `inputs` rather than `input`: avoids shadowing the Python builtin.
    inputs = Input(shape=(50, 1))
    x = LSTM(10)(inputs)
    x = Dense(10, activation='relu')(x)
    outputs = Dense(1, activation='linear')(x)
    return Model(inputs=inputs, outputs=outputs)
|
cs |
반응형
'머신러닝_딥러닝 > Tensorflow + Keras' 카테고리의 다른 글
(Tensorflow 2.x) RNN (0) | 2021.11.14 |
---|---|
(Tensorflow 2.x) 커스텀 Loss function (0) | 2021.11.14 |
(Tensorflow 2.x) CNN 3탄 (Transfer Learning) (0) | 2021.10.23 |
(Tensorflow 2.x) CNN 2탄 (With Garbage Classification) (0) | 2021.10.23 |
(Tensorflow 2.x) CNN 1탄 (With CIFAR-10 ) (0) | 2021.10.23 |