From 2e939aeaf0aad44486293e9b9ec403f9cadfaeb2 Mon Sep 17 00:00:00 2001
From: JeromeIsaac1 <2237787769@qq.com>
Date: Fri, 11 Jul 2025 22:57:01 +0800
Subject: [PATCH 1/2] Fix #8785: Refactor neural network with public reference and backpropagation

---
 neural_network/simple_neural_network.py | 116 +++++++++++++-----------
 1 file changed, 64 insertions(+), 52 deletions(-)

diff --git a/neural_network/simple_neural_network.py b/neural_network/simple_neural_network.py
index 8751a389..a5fab302 100644
--- a/neural_network/simple_neural_network.py
+++ b/neural_network/simple_neural_network.py
@@ -1,63 +1,75 @@
 """
-Forward propagation explanation:
-https://towardsdatascience.com/forward-propagation-in-neural-networks-simplified-math-and-code-version-bbcfef6f9250
+Simple Neural Network with Backpropagation
+Reference: https://mattmazur.com/2015/03/17/a-step-by-step-backpropagation-example/
 """
-import math
-import random
+import numpy as np
+import matplotlib.pyplot as plt
 
 
-# Sigmoid
-def sigmoid_function(value: float, deriv: bool = False) -> float:
-    """Return the sigmoid function of a float.
-
-    >>> sigmoid_function(3.5)
-    0.9706877692486436
-    >>> sigmoid_function(3.5, True)
-    -8.75
+def sigmoid(x: np.ndarray, deriv: bool = False) -> np.ndarray:
+    """
+    Compute the sigmoid of ``x``, or its derivative when ``deriv`` is True.
+    For the derivative, ``x`` must already be a sigmoid output.
+
+    >>> sigmoid(np.array([0.5]))
+    array([0.62245933])
+    >>> sigmoid(np.array([0.5]), deriv=True)
+    array([0.25])
     """
-    if deriv:
-        return value * (1 - value)
-    return 1 / (1 + math.exp(-value))
-
-
-# Initial Value
-INITIAL_VALUE = 0.02
-
-
-def forward_propagation(expected: int, number_propagations: int) -> float:
-    """Return the value found after the forward propagation training.
-
-    >>> res = forward_propagation(32, 450_000)  # Was 10_000_000
-    >>> res > 31 and res < 33
-    True
-
-    >>> res = forward_propagation(32, 1000)
-    >>> res > 31 and res < 33
-    False
-    """
-
-    # Random weight
-    weight = float(2 * (random.randint(1, 100)) - 1)
-
-    for _ in range(number_propagations):
-        # Forward propagation
-        layer_1 = sigmoid_function(INITIAL_VALUE * weight)
-        # How much did we miss?
-        layer_1_error = (expected / 100) - layer_1
-        # Error delta
-        layer_1_delta = layer_1_error * sigmoid_function(layer_1, True)
-        # Update weight
-        weight += INITIAL_VALUE * layer_1_delta
-
-    return layer_1 * 100
+    if deriv:
+        # ``x`` is already a sigmoid output, so the derivative is x * (1 - x)
+        return x * (1 - x)
+    return 1 / (1 + np.exp(-x))
+
+
+class SimpleNeuralNetwork:
+    def __init__(self, input_size: int, hidden_size: int, output_size: int):
+        """Initialize weights with values between -1 and 1"""
+        self.weights_input_hidden = 2 * np.random.random((input_size, hidden_size)) - 1
+        self.weights_hidden_output = 2 * np.random.random((hidden_size, output_size)) - 1
+
+    def train(
+        self, X: np.ndarray, y: np.ndarray, iterations: int, learning_rate: float = 0.1
+    ) -> None:
+        """Train the network using backpropagation"""
+        error_history = []
+        for _ in range(iterations):
+            # Forward propagation
+            layer_hidden = sigmoid(np.dot(X, self.weights_input_hidden))
+            layer_output = sigmoid(np.dot(layer_hidden, self.weights_hidden_output))
+
+            # Calculate error
+            output_error = y - layer_output
+            error_history.append(np.mean(np.abs(output_error)))
+
+            # Backpropagation (layer values are already sigmoid outputs)
+            output_delta = output_error * sigmoid(layer_output, deriv=True)
+            hidden_error = output_delta.dot(self.weights_hidden_output.T)
+            hidden_delta = hidden_error * sigmoid(layer_hidden, deriv=True)
+
+            # Update weights
+            self.weights_hidden_output += learning_rate * layer_hidden.T.dot(output_delta)
+            self.weights_input_hidden += learning_rate * X.T.dot(hidden_delta)
+
+        # Plot training error
+        plt.plot(error_history)
+        plt.xlabel("Iterations")
+        plt.ylabel("Error")
+        plt.title("Training Error Curve")
+        plt.savefig("training_error.png")  # save the error curve for verification
+
+    def predict(self, X: np.ndarray) -> np.ndarray:
+        """Make predictions with the trained network"""
+        layer_hidden = sigmoid(np.dot(X, self.weights_input_hidden))
+        return sigmoid(np.dot(layer_hidden, self.weights_hidden_output))
 
 
 if __name__ == "__main__":
-    import doctest
-
-    doctest.testmod()
-
-    expected = int(input("Expected value: "))
-    number_propagations = int(input("Number of propagations: "))
-    print(forward_propagation(expected, number_propagations))
+    # Example: train on the XOR problem
+    X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
+    y = np.array([[0], [1], [1], [0]])
+
+    nn = SimpleNeuralNetwork(2, 2, 1)
+    nn.train(X, y, 10000)
+    print("Predictions after training:")
+    print(nn.predict(X))
-- 
Gitee
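
A quick way to sanity-check the backpropagation deltas in the patch above is a
finite-difference gradient check. The following is a minimal sketch, assuming
PATCH 1/2 is applied and the script runs from the repository root; the `loss`
helper, the probed weight entry [0, 0], and the epsilon are illustrative
choices, not part of the patch.

    import numpy as np

    from neural_network.simple_neural_network import SimpleNeuralNetwork, sigmoid

    np.random.seed(0)
    X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]], dtype=float)
    y = np.array([[0], [1], [1], [0]], dtype=float)
    nn = SimpleNeuralNetwork(2, 2, 1)

    def loss() -> float:
        # Squared-error loss implied by the update rule in train()
        return 0.5 * float(np.sum((y - nn.predict(X)) ** 2))

    # Analytic gradient of the loss w.r.t. one hidden->output weight
    hidden = sigmoid(X.dot(nn.weights_input_hidden))
    out = sigmoid(hidden.dot(nn.weights_hidden_output))
    output_delta = (y - out) * sigmoid(out, deriv=True)
    analytic = -hidden.T.dot(output_delta)[0, 0]

    # Numerical gradient of the same weight by central differences
    eps = 1e-6
    nn.weights_hidden_output[0, 0] += eps
    loss_plus = loss()
    nn.weights_hidden_output[0, 0] -= 2 * eps
    loss_minus = loss()
    nn.weights_hidden_output[0, 0] += eps  # restore the original weight
    numeric = (loss_plus - loss_minus) / (2 * eps)

    # Should be close to zero (~1e-8 or smaller) when the deltas are correct
    print(abs(analytic - numeric))
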
From 564a436bd1c25abb0f0d4581f55b3224a12fd6fb Mon Sep 17 00:00:00 2001
From: JeromeIsaac1 <2237787769@qq.com>
Date: Fri, 11 Jul 2025 23:13:09 +0800
Subject: [PATCH 2/2] Fix #8785: Add XOR regression test for the simple neural network

---
 .../tests/test_simple_neural_network.py | 16 ++++++++++++++++
 1 file changed, 16 insertions(+)
 create mode 100644 neural_network/tests/test_simple_neural_network.py

diff --git a/neural_network/tests/test_simple_neural_network.py b/neural_network/tests/test_simple_neural_network.py
new file mode 100644
index 00000000..4081b2ff
--- /dev/null
+++ b/neural_network/tests/test_simple_neural_network.py
@@ -0,0 +1,16 @@
+import numpy as np
+from neural_network.simple_neural_network import SimpleNeuralNetwork
+
+def test_xor_prediction():
+    """Test that the network can learn the XOR problem."""
+    np.random.seed(0)  # fixed seed so the test is deterministic
+    X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
+    y = np.array([[0], [1], [1], [0]])
+
+    nn = SimpleNeuralNetwork(2, 2, 1)
+    nn.train(X, y, 10000)
+    predictions = nn.predict(X)
+
+    # Each prediction should be within 0.2 of its target value
+    for i in range(len(X)):
+        assert abs(predictions[i][0] - y[i][0]) < 0.2, f"Test failed for input {X[i]}"
-- 
Gitee
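
To exercise the scenario the new test covers end to end, here is a minimal
sketch; it assumes both patches are applied and is run from the repository
root, and the seed mirrors the one used in the test.

    import numpy as np

    from neural_network.simple_neural_network import SimpleNeuralNetwork

    np.random.seed(0)  # same seeding idea as the test, for repeatability
    X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
    y = np.array([[0], [1], [1], [0]])

    nn = SimpleNeuralNetwork(2, 2, 1)
    nn.train(X, y, 10000)

    for features, target, pred in zip(X, y, nn.predict(X)):
        # A prediction within 0.2 of its target rounds to the correct XOR label
        print(f"{features} -> target {target[0]}, prediction {pred[0]:.3f}")
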