```cpp
#include <iostream>
#include <vector>
#include <cmath>
#include <cstdlib>
#include <ctime>

using namespace std;

// Sigmoid activation function
double sigmoid(double x) {
    return 1.0 / (1.0 + exp(-x));
}

// Derivative of the sigmoid function
double sigmoidDerivative(double x) {
    return x * (1.0 - x);
}

// Neural network class
class NeuralNetwork {
private:
    vector<vector<double>> weights;
    vector<double> biases;
    vector<double> outputs;

public:
    NeuralNetwork(int inputSize, int outputSize) {
        // Initialize weights and biases
        weights.resize(outputSize, vector<double>(inputSize));
        biases.resize(outputSize);
        outputs.resize(outputSize);

        srand(static_cast<unsigned>(time(0))); // Seed for random number generation

        for (int i = 0; i < outputSize; ++i) {
            biases[i] = static_cast<double>(rand()) / RAND_MAX; // Random bias
            for (int j = 0; j < inputSize; ++j) {
                weights[i][j] = static_cast<double>(rand()) / RAND_MAX; // Random weight
            }
        }
    }

    // Forward propagation
    void forward(const vector<double>& inputs) {
        for (size_t i = 0; i < weights.size(); ++i) {
            double activation = biases[i];
            for (size_t j = 0; j < weights[i].size(); ++j) {
                activation += weights[i][j] * inputs[j];
            }
            outputs[i] = sigmoid(activation);
        }
    }

    // Print the network outputs
    void printOutputs() const {
        cout << "Outputs: ";
        for (const double& output : outputs) {
            cout << output << " ";
        }
        cout << endl;
    }
};

int main() {
    int inputSize = 3;
    int outputSize = 1;

    NeuralNetwork nn(inputSize, outputSize);

    vector<double> inputs = {0.5, 0.3, 0.2}; // Example inputs

    cout << "Running neural network simulation..." << endl;
    nn.forward(inputs);
    nn.printOutputs();

    return 0;
}
```
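To try the program, compile it with any C++11-or-later compiler, e.g. `g++ -std=c++11 neural_network.cpp -o nn` (assuming the source is saved as `neural_network.cpp`; the file name is just an assumption here). Because the weights and biases are seeded from the current time, the printed output value changes from run to run, but the sigmoid guarantees it always lies strictly between 0 and 1.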
Explanation
- Headers:
  - `<iostream>`: For input and output operations.
  - `<vector>`: For the `std::vector` container.
  - `<cmath>`: For mathematical functions like `exp()`.
  - `<cstdlib>`: For the `rand()` and `srand()` functions.
  - `<ctime>`: For the `time()` function used to seed the random number generator.
- Activation Function:
  - `double sigmoid(double x)`: Computes the sigmoid activation function.
  - `double sigmoidDerivative(double x)`: Computes the derivative of the sigmoid, expressed in terms of the sigmoid's output rather than its input (not used in this simple example, but essential once the network is trained with backpropagation).
- NeuralNetwork Class:
  - Attributes:
    - `weights`: 2D vector holding the weights of the neurons.
    - `biases`: Vector holding the bias for each neuron.
    - `outputs`: Vector storing the output of each neuron.
  - Constructor:
    - Initializes the weights and biases with random values.
  - Methods:
    - `void forward(const vector<double>& inputs)`: Performs forward propagation by computing the weighted sum of the inputs, adding the bias, and applying the sigmoid activation function (a worked numeric example of this calculation follows the list).
    - `void printOutputs() const`: Prints the output of the network.
- Main Function:
  - Initialization:
    - Creates a `NeuralNetwork` object with a specified input size and output size.
    - Defines a vector of example inputs.
  - Simulation:
    - Runs forward propagation with the example inputs.
    - Prints the network outputs.
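To make the arithmetic inside `forward` concrete, here is the same weighted-sum-plus-sigmoid calculation carried out once in isolation. The weights, bias, and inputs below are fixed values chosen purely for illustration; the program above draws its weights and bias at random, so its actual output will differ.

```cpp
#include <cmath>
#include <iostream>

int main() {
    // Fixed example values (hypothetical; the real program randomizes the weights and bias)
    double weights[3] = {0.4, 0.6, 0.9};
    double bias = 0.1;
    double inputs[3] = {0.5, 0.3, 0.2};

    // Weighted sum plus bias: 0.1 + 0.4*0.5 + 0.6*0.3 + 0.9*0.2 = 0.66
    double activation = bias;
    for (int j = 0; j < 3; ++j) {
        activation += weights[j] * inputs[j];
    }

    // Sigmoid squashes the result into (0, 1): 1 / (1 + e^(-0.66)) is roughly 0.659
    double output = 1.0 / (1.0 + std::exp(-activation));
    std::cout << "Neuron output: " << output << std::endl;
    return 0;
}
```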
Notes:
- Simple ANN: This example uses a single-layer neural network with randomly initialized weights and biases. It demonstrates basic forward propagation.
- No Training: This program does not include training or backpropagation; it only shows how to perform forward propagation with a neural network. A minimal sketch of what a single training step could look like is given after these notes.
- Activation Function: The sigmoid function is used for activation, which is common in binary classification tasks.
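As noted above, the program only performs forward propagation. Purely as an illustration of where `sigmoidDerivative` would come in, the following self-contained sketch applies gradient-descent updates (the classic delta rule for squared error) to a single sigmoid neuron. The target value, learning rate, initial weights, and iteration count are arbitrary choices for this example, not part of the original program.

```cpp
#include <cmath>
#include <iostream>
#include <vector>
using namespace std;

double sigmoid(double x) { return 1.0 / (1.0 + exp(-x)); }
// Derivative expressed in terms of the sigmoid's output y = sigmoid(x)
double sigmoidDerivative(double y) { return y * (1.0 - y); }

int main() {
    // Arbitrary example values: one neuron with three inputs
    vector<double> w = {0.4, 0.6, 0.9};
    double b = 0.1;
    vector<double> x = {0.5, 0.3, 0.2};
    double target = 1.0;       // desired output for this input
    double learningRate = 0.5; // arbitrary step size

    for (int step = 0; step < 1000; ++step) {
        // Forward pass: weighted sum plus bias, then sigmoid
        double z = b;
        for (size_t j = 0; j < w.size(); ++j) z += w[j] * x[j];
        double y = sigmoid(z);

        // Backward pass for squared error E = 0.5 * (target - y)^2:
        // the error signal is (target - y) * sigmoid'(z), with sigmoid'(z) = y * (1 - y)
        double delta = (target - y) * sigmoidDerivative(y);

        // Gradient-descent update of weights and bias
        for (size_t j = 0; j < w.size(); ++j) w[j] += learningRate * delta * x[j];
        b += learningRate * delta;
    }

    // After training, the neuron's output should be close to the target
    double z = b;
    for (size_t j = 0; j < w.size(); ++j) z += w[j] * x[j];
    cout << "Trained output: " << sigmoid(z) << endl; // approaches 1.0
    return 0;
}
```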