# 什么？神经网络还能创造新知识？

#### 自底向上构造异或运算神经网络(XOR NN)

XOR (0,0) = XOR (1,1) = 0
XOR (1,0) = XOR (0,1) = 1

XOR (x,y) = AND ( NAND (x,y) , OR (x,y) )

XOR (x,y) = OR ( AND ( NOT(x) , y ) , AND ( x , NOT(y) ) )
XOR (x,y) = NAND ( NAND ( x , NAND ( x,y) ) , NAND ( y , NAND ( x,y) ) )

#### 使用TensorFlow Keras构建异或神经网络

Keras是一个功能强大且易于使用的神经网络库。本节中建立一个三层的2-2-1模型，并与之前建构的神经网络进行比较。

# Generate NN for XOR operation
# input layer:  2 nodes, one for each bit (0 = false and +1 = true)
# output layer: 1 node for result (0 = false and +1 = true)
# Use sigmoid activation function, gradient descent optimizer and mean squared error loss function
# Last update: 28.05.2019

import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt

# Define model: 2-2-1 fully connected network (two input bits -> one output bit),
# as described in the header comments above.
# NOTE(review): the original Sequential() had no layers and no compile() call,
# so model.fit() below could not have run; the layers/compile here follow the
# stated design (sigmoid activation, gradient descent, mean squared error).
nodes = 2
model = tf.keras.Sequential()
# Hidden layer: `nodes` sigmoid units reading both input bits.
model.add(tf.keras.layers.Dense(nodes, input_dim=2, activation='sigmoid'))
# Output layer: single sigmoid unit producing the XOR result in [0, 1].
model.add(tf.keras.layers.Dense(1, activation='sigmoid'))
# Plain gradient descent + MSE loss; lr=1 is customary for this tiny XOR task
# so it converges within the 10000 epochs used below — TODO confirm rate.
model.compile(optimizer=tf.keras.optimizers.SGD(learning_rate=1.0),
              loss='mean_squared_error')
model.summary()

# Training data: the full XOR truth table (inputs paired with targets).
epochs = 10000
truth_table = [((0, 0), 0), ((0, 1), 1), ((1, 0), 1), ((1, 1), 0)]
data_in = np.array([pair for pair, _ in truth_table])
data_out = np.array([label for _, label in truth_table])

# Train model on all four XOR rows; verbose=0 suppresses per-epoch output.
# NOTE(review): original had the typo `epochsepochs=epochs`, which would raise
# TypeError: fit() got an unexpected keyword argument.
history = model.fit(data_in, data_out, epochs=epochs, verbose=0)

# Analysis of training history: one scatter plot per recorded metric
# (e.g. 'loss'), showing the metric's value across all training epochs.
# NOTE(review): the original loop body was not indented, an IndentationError.
for key in history.history:
    plt.scatter(range(epochs), history.history[key], s=1)
    plt.ylabel(key)
    plt.xlabel('epochs')
    plt.show()

# Predict with model: raw sigmoid outputs for all four input combinations
# (values near 0 or 1; rounded to hard 0/1 when printed below).
result = model.predict(data_in)

# Print results
def printarray(arr):
    """Render *arr* via np.array2string with newlines stripped (one line)."""
    # NOTE(review): the original `return` was not indented under `def`,
    # an IndentationError; only the indentation is fixed here.
    return np.array2string(arr).replace('\n','')

print()
# Side-by-side comparison: inputs, ground-truth XOR values, the model's raw
# sigmoid predictions, and those predictions rounded to hard 0/1.
print('input', printarray(data_in))
print('output (calculation)', printarray(data_out))
print('output (prediction) ', printarray(result))
print('output (pred. norm.)', printarray(np.round(result)))

# Get weights of model: dump the learned weight matrices and bias vectors,
# layer by layer, for inspection of the trained network.
print()
print(model.get_weights())

#### 分析和结论

Python的脚本输出

Keras在异或运算神经网络中的布尔函数

XOR (x,y) = INH ( OR (x,y), AND (x,y) )