..Back
×
Single layer NN in javascript
Author: Win Aung Cho, 2025-05-06 06:15:03 AM
A simple neural network example with one hidden layer, 2 inputs, and one output. The sample trains on the XOR truth table and then evaluates every input pair.
/**
 * Minimal feed-forward neural network with one hidden layer, sigmoid
 * activations throughout, and online (per-sample) gradient-descent training.
 */
class NeuralNetwork {
  /**
   * @param {number} inputSize  - number of input neurons
   * @param {number} hiddenSize - number of hidden neurons
   * @param {number} outputSize - number of output neurons
   */
  constructor(inputSize, hiddenSize, outputSize) {
    // Weights start as small random values in [-0.5, 0.5); biases start at 0.
    this.weightsInputHidden = this.randomMatrix(inputSize, hiddenSize);
    this.weightsHiddenOutput = this.randomMatrix(hiddenSize, outputSize);
    this.biasHidden = Array(hiddenSize).fill(0);
    this.biasOutput = Array(outputSize).fill(0);
    this.learningRate = 0.1;
  }

  /**
   * Build a rows x cols matrix of uniform random values in [-0.5, 0.5).
   * @param {number} rows
   * @param {number} cols
   * @returns {number[][]}
   */
  randomMatrix(rows, cols) {
    return Array.from({ length: rows }, () =>
      Array.from({ length: cols }, () => Math.random() - 0.5)
    );
  }

  /** Logistic activation: maps any real number into (0, 1). */
  sigmoid(x) {
    return 1 / (1 + Math.exp(-x));
  }

  /**
   * Derivative of the sigmoid expressed in terms of its OUTPUT value,
   * i.e. expects an already-activated value, not the pre-activation sum.
   */
  sigmoidDerivative(x) {
    return x * (1 - x);
  }

  /**
   * Run one forward pass, caching layer activations for backpropagation.
   * @param {number[]} input - one sample, length inputSize
   * @returns {number[]} the output-layer activations (length outputSize)
   */
  forward(input) {
    this.hidden = [];
    for (let h = 0; h < this.biasHidden.length; h++) {
      let acc = this.biasHidden[h];
      for (let i = 0; i < input.length; i++) {
        acc += input[i] * this.weightsInputHidden[i][h];
      }
      this.hidden.push(this.sigmoid(acc));
    }
    this.output = [];
    for (let o = 0; o < this.biasOutput.length; o++) {
      let acc = this.biasOutput[o];
      for (let h = 0; h < this.hidden.length; h++) {
        acc += this.hidden[h] * this.weightsHiddenOutput[h][o];
      }
      this.output.push(this.sigmoid(acc));
    }
    return this.output;
  }

  /**
   * One gradient-descent step against the activations cached by the most
   * recent forward() call. Must be called after forward(input).
   * @param {number[]} input    - the same sample passed to forward()
   * @param {number[]} expected - target values, length outputSize
   */
  backpropagate(input, expected) {
    // Deltas for both layers are computed BEFORE any weights change, so the
    // hidden-layer error uses the pre-update output weights.
    const outputDelta = this.output.map(
      (out, o) => (expected[o] - out) * this.sigmoidDerivative(out)
    );
    const hiddenDelta = this.hidden.map((hid, h) => {
      let err = 0;
      for (let o = 0; o < outputDelta.length; o++) {
        err += outputDelta[o] * this.weightsHiddenOutput[h][o];
      }
      return err * this.sigmoidDerivative(hid);
    });
    // Apply the updates: hidden->output weights, input->hidden weights, biases.
    for (let h = 0; h < this.weightsHiddenOutput.length; h++) {
      for (let o = 0; o < this.weightsHiddenOutput[h].length; o++) {
        this.weightsHiddenOutput[h][o] += this.hidden[h] * outputDelta[o] * this.learningRate;
      }
    }
    for (let i = 0; i < this.weightsInputHidden.length; i++) {
      for (let h = 0; h < this.weightsInputHidden[i].length; h++) {
        this.weightsInputHidden[i][h] += input[i] * hiddenDelta[h] * this.learningRate;
      }
    }
    for (let h = 0; h < this.biasHidden.length; h++) {
      this.biasHidden[h] += hiddenDelta[h] * this.learningRate;
    }
    for (let o = 0; o < this.biasOutput.length; o++) {
      this.biasOutput[o] += outputDelta[o] * this.learningRate;
    }
  }

  /**
   * Train by repeating forward + backpropagate over every sample, in order,
   * for the given number of epochs.
   * @param {Array<[number[], number[]]>} data - list of [input, expected] pairs
   * @param {number} epochs
   */
  train(data, epochs) {
    for (let epoch = 0; epoch < epochs; epoch++) {
      for (const [input, expected] of data) {
        this.forward(input);
        this.backpropagate(input, expected);
      }
    }
  }
}
// Demo: train a 2-8-1 network on the XOR truth table, then show the
// network's prediction for every input pair.
// (var keeps the bindings on the global object, as in the original script.)
var nn = new NeuralNetwork(2, 8, 1);
var data = [
  [[0, 0], [0]],
  [[0, 1], [1]],
  [[1, 0], [1]],
  [[1, 1], [0]]
];
nn.train(data, 10000);
// forward() returns a fresh output array each call, so collecting the
// references directly is safe.
var r = data.map(function (sample) {
  return nn.forward(sample[0]);
});
alert(JSON.stringify(r));
Author: Win Aung Cho