|
| 1 | +import numpy |
| 2 | +from matplotlib import pyplot |
| 3 | + |
| 4 | + |
def add_ones(samples):
    """
    Appends a constant 1 column to each sample row so the bias can be
    learned as an ordinary weight.

    :param samples: 2-D array of input rows
    :return: copy of ``samples`` with a trailing column of ones
    """
    row_count, _column_count = numpy.shape(samples)
    bias_column = numpy.ones((row_count, 1))
    return numpy.hstack((samples, bias_column))
| 13 | + |
| 14 | + |
def create_weights(inputs=2, hidden_layer_neurons=2, outputs=1):
    """
    Builds the initial weight matrices, drawn uniformly from [0.1, 0.9).
    Each layer gets one extra weight row for its bias input.

    :param inputs: number of inputs (default 2)
    :param hidden_layer_neurons: number of neurons in the hidden layer (default 2)
    :param outputs: number of outputs (default 1)
    :return: dict with "hidden" and "output" weight matrices
    """
    # Insertion order matters: "hidden" is sampled before "output", which keeps
    # the random stream identical to sampling them one by one.
    shapes = {
        "hidden": (inputs + 1, hidden_layer_neurons),
        "output": (hidden_layer_neurons + 1, outputs),
    }
    return {layer: numpy.random.uniform(0.1, 0.9, shape) for layer, shape in shapes.items()}
| 25 | + |
| 26 | + |
def sigmoid(inputs):
    """
    Element-wise logistic activation: 1 / (1 + e^-x).

    :param inputs: array of pre-activation values
    :return: array of the same shape with each value squashed into (0, 1)
    """
    negative_exponentials = numpy.exp(-inputs)
    return 1.0 / (1.0 + negative_exponentials)
| 33 | + |
| 34 | + |
def derivative(inputs):
    """
    Derivative of the sigmoid expressed in terms of the sigmoid's OUTPUT:
    for y = sigmoid(x), dy/dx = y * (1 - y).

    :param inputs: array of sigmoid outputs (not raw pre-activations)
    :return: element-wise derivative values
    """
    complement = 1 - inputs
    return inputs * complement
| 41 | + |
| 42 | + |
def mean_squared_error(errors, samples=4, outputs=1):
    """
    Mean squared error averaged over every (sample, output) pair.

    :param errors: array of differences between expected and computed outputs
    :param samples: number of samples (default 4)
    :param outputs: number of output values per sample (default 1)
    :return: scalar mean of the squared errors
    """
    total = numpy.sum(numpy.power(errors, 2))
    return total / (samples * outputs)
| 51 | + |
| 52 | + |
def train(samples, labels, epochs=10000, learning_rate=0.5, minimum_loss=0.001, sample_number=4,
          input_number=2, hidden_layer_neurons=2, output_number=1):
    """
    Trains a single-hidden-layer network with batch gradient descent.
    Stops early once the loss drops below ``minimum_loss``.

    :param samples: array of input rows, already extended with a bias column
    :param labels: expected output array, one row per sample
    :param epochs: maximum number of iterations (default 10000)
    :param learning_rate: step size (default 0.5)
    :param minimum_loss: early-stopping mean squared error threshold (default 0.001)
    :param sample_number: number of samples (default 4)
    :param input_number: number of inputs (default 2)
    :param hidden_layer_neurons: number of neurons in the hidden layer (default 2)
    :param output_number: number of outputs (default 1)
    :return: (list of loss values, one per iteration; final computed outputs)
    """
    losses = []
    outputs = None
    weights = create_weights(input_number, hidden_layer_neurons, output_number)
    for _epoch in range(epochs):
        # Feed forward: hidden activations get a bias column before the output layer.
        hidden_outputs = add_ones(sigmoid(numpy.dot(samples, weights["hidden"])))
        outputs = sigmoid(numpy.dot(hidden_outputs, weights["output"]))
        errors = labels - outputs
        losses.append(mean_squared_error(errors, sample_number, output_number))
        # Backpropagation. ``derivative`` takes activations, not pre-activations.
        output_errors = derivative(outputs) * errors
        # BUG FIX: the hidden-layer error must be backpropagated through the SAME
        # output weights that produced the forward pass, so compute it BEFORE the
        # output weights are updated (the original updated them first).
        # The bias row of the output weights is dropped: it has no path back
        # into the hidden neurons.
        hidden_errors = derivative(hidden_outputs[:, :-1]) * numpy.dot(
            output_errors, numpy.transpose(weights["output"][:-1, :]))
        # Matrix products replace the element-wise broadcast/sum and the
        # per-neuron loop; they are identical for one output and also correct
        # when output_number > 1.
        weights["output"] += learning_rate * numpy.dot(numpy.transpose(hidden_outputs), output_errors)
        weights["hidden"] += learning_rate * numpy.dot(numpy.transpose(samples), hidden_errors)
        if losses[-1] < minimum_loss:
            break
    return losses, outputs
| 92 | + |
| 93 | + |
def show_performance(losses):
    """
    Displays the training curve: loss on the y-axis against epoch on the x-axis.

    :param losses: sequence of loss values, one per epoch
    """
    figure = pyplot.figure()
    axes = figure.add_subplot()
    axes.plot(losses)
    axes.set_xlabel("Epochs")
    axes.set_ylabel("Loss")
    pyplot.show()
| 104 | + |
| 105 | + |
def main():
    """
    Entry point: loads the experiment from "data.txt", trains the network,
    plots the loss curve and prints the final outputs.

    Expected file layout: line 0 = epochs, line 1 = learning rate,
    lines 2-5 = the four input samples, line 6 = the expected outputs.
    """
    with open("data.txt") as file:
        lines = file.read().split('\n')
    epochs = int(lines[0])
    learning_rate = float(lines[1])
    sample_rows = [numpy.array([int(token) for token in row.split()]) for row in lines[2:6]]
    samples = add_ones(numpy.array(sample_rows))
    labels = numpy.array([numpy.array([int(token)]) for token in lines[6].split()])
    losses, outputs = train(samples, labels, epochs, learning_rate)
    show_performance(losses)
    # Strip numpy's bracket decoration so only the raw output values are printed.
    print(str(numpy.transpose(outputs)).replace('[', '').replace(']', ''))
| 121 | + |
| 122 | + |
# Run the demo only when executed as a script, not when imported as a module.
if __name__ == '__main__':
    main()
0 commit comments