>>19 Thanks, so that was the cause.
The other problem is something like the attached image.
One thing I said wrong earlier: it should be sigmoid, since the target only has one dimension.
Concretely, the test is as follows:
import numpy as np
from matplotlib import pyplot as plt
from tensorflow import keras
from sklearn.datasets import make_blobs
from sklearn.model_selection import train_test_split
def get_mlp(loss):
    # small MLP for binary classification: a single sigmoid output unit
    mlp = keras.Sequential([
        keras.Input(shape=(n_features,)),
        keras.layers.Dense(32, activation='relu'),
        keras.layers.Dense(32, activation='relu'),
        keras.layers.Dense(1, activation='sigmoid')
    ])
    mlp.compile(optimizer='adam', loss=loss)
    return mlp
n_features = 8
x, y = make_blobs(n_samples=(81920, 38400), n_features=8,
                  centers=None, cluster_std=[30, 15])
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=.25)
mlp1 = get_mlp('binary_crossentropy')  # the usual loss for a sigmoid output
mlp2 = get_mlp('mae')                  # mean absolute error, for comparison
history1 = mlp1.fit(x_train, y_train, epochs=30, batch_size=1024,
                    use_multiprocessing=True, workers=4, verbose=2)
history2 = mlp2.fit(x_train, y_train, epochs=30, batch_size=1024,
                    use_multiprocessing=True, workers=4, verbose=2)
yp1 = mlp1.predict(x_test)
yp2 = mlp2.predict(x_test)
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 4))
_ = ax1.hist(yp1, bins=50)  # prediction distribution under binary cross-entropy
_ = ax2.hist(yp2, bins=50)  # prediction distribution under MAE
plt.show()
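As a quick follow-up (a minimal sketch, assuming the usual 0.5 threshold on the sigmoid output), comparing test accuracy makes the difference easier to quantify than the histograms alone:
# hypothetical check: threshold predictions at 0.5 and compare accuracy on the test set
acc1 = np.mean((yp1.ravel() > 0.5) == y_test)
acc2 = np.mean((yp2.ravel() > 0.5) == y_test)
print(f'binary_crossentropy accuracy: {acc1:.4f}')
print(f'mae accuracy: {acc2:.4f}')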