TheAlgorithms/C++ 1.0.0
All the algorithms implemented in C++
neural_network.cpp
/**
 * @file
 * @brief Implementation of a [Multilayer Perceptron]
 * (https://en.wikipedia.org/wiki/Multilayer_perceptron) (neural network).
 */
#include <algorithm>
#include <cassert>
#include <chrono>
#include <cmath>
#include <fstream>
#include <iostream>
#include <sstream>
#include <string>
#include <valarray>
#include <vector>

#include "vector_ops.hpp"  // Custom header file for vector operations

namespace machine_learning {
namespace neural_network {
namespace activations {
double sigmoid(const double &x) { return 1.0 / (1.0 + std::exp(-x)); }

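// NOTE: the derivative helpers below are written in terms of the *activated*
// output y = f(x) rather than the raw input x (e.g. dsigmoid(y) = y * (1 - y)),
// because the training loop applies them to stored layer activations.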
double dsigmoid(const double &x) { return x * (1 - x); }

double relu(const double &x) { return std::max(0.0, x); }

double drelu(const double &x) { return x >= 0.0 ? 1.0 : 0.0; }

double tanh(const double &x) { return 2 / (1 + std::exp(-2 * x)) - 1; }

double dtanh(const double &x) { return 1 - x * x; }
}  // namespace activations
namespace util_functions {
double square(const double &x) { return x * x; }
double identity_function(const double &x) { return x; }
}  // namespace util_functions
namespace layers {
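/**
 * DenseLayer: a single fully connected layer. The kernel is stored as a
 * (neurons in previous layer) x (neurons in this layer) matrix, so a forward
 * pass computes activation(input * kernel).
 */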
class DenseLayer {
 public:
    // To store the activation function and its derivative
    double (*activation_function)(const double &);
    double (*dactivation_function)(const double &);
    int neurons;             // To store number of neurons (used in summary)
    std::string activation;  // To store activation name (used in summary)
    std::vector<std::valarray<double>> kernel;  // To store kernel (aka weights)

    DenseLayer(const int &neurons, const std::string &activation,
               const std::pair<size_t, size_t> &kernel_shape,
               const bool &random_kernel) {
        // Choosing activation (and its derivative)
        if (activation == "sigmoid") {
            activation_function = neural_network::activations::sigmoid;
            dactivation_function = neural_network::activations::dsigmoid;
        } else if (activation == "relu") {
            activation_function = neural_network::activations::relu;
            dactivation_function = neural_network::activations::drelu;
        } else if (activation == "tanh") {
            activation_function = neural_network::activations::tanh;
            dactivation_function = neural_network::activations::dtanh;
        } else if (activation == "none") {
            // Set identity function in case none is supplied
            activation_function =
                neural_network::util_functions::identity_function;
            dactivation_function =
                neural_network::util_functions::identity_function;
        } else {
            // If supplied activation is invalid
            std::cerr << "ERROR (" << __func__ << ") : ";
            std::cerr << "Invalid argument. Expected {none, sigmoid, relu, "
                         "tanh} got ";
            std::cerr << activation << std::endl;
            std::exit(EXIT_FAILURE);
        }
        this->activation = activation;  // Setting activation name
        this->neurons = neurons;        // Setting number of neurons
        // Initialize kernel according to flag
        if (random_kernel) {
            uniform_random_initialization(kernel, kernel_shape, -1.0, 1.0);
        } else {
            unit_matrix_initialization(kernel, kernel_shape);
        }
    }
    DenseLayer(const int &neurons, const std::string &activation,
               const std::vector<std::valarray<double>> &kernel) {
        // Choosing activation (and its derivative)
        if (activation == "sigmoid") {
            activation_function = neural_network::activations::sigmoid;
            dactivation_function = neural_network::activations::dsigmoid;
        } else if (activation == "relu") {
            activation_function = neural_network::activations::relu;
            dactivation_function = neural_network::activations::drelu;
        } else if (activation == "tanh") {
            activation_function = neural_network::activations::tanh;
            dactivation_function = neural_network::activations::dtanh;
        } else if (activation == "none") {
            // Set identity function in case none is supplied
            activation_function =
                neural_network::util_functions::identity_function;
            dactivation_function =
                neural_network::util_functions::identity_function;
        } else {
            // If supplied activation is invalid
            std::cerr << "ERROR (" << __func__ << ") : ";
            std::cerr << "Invalid argument. Expected {none, sigmoid, relu, "
                         "tanh} got ";
            std::cerr << activation << std::endl;
            std::exit(EXIT_FAILURE);
        }
        this->activation = activation;  // Setting activation name
        this->neurons = neurons;        // Setting number of neurons
        this->kernel = kernel;          // Setting supplied kernel values
    }

    DenseLayer(const DenseLayer &layer) = default;

    ~DenseLayer() = default;

    DenseLayer &operator=(const DenseLayer &layer) = default;

    DenseLayer(DenseLayer &&) = default;

    DenseLayer &operator=(DenseLayer &&) = default;
};
}  // namespace layers
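/**
 * NeuralNetwork (multilayer perceptron): a stack of DenseLayer objects with
 * helpers for training, evaluation, prediction, and (de)serialization.
 */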
class NeuralNetwork {
 private:
    std::vector<neural_network::layers::DenseLayer> layers;  // To store layers

    /**
     * Private constructor used by load_model: rebuilds a network from a saved
     * configuration and pretrained kernels.
     */
    NeuralNetwork(
        const std::vector<std::pair<int, std::string>> &config,
        const std::vector<std::vector<std::valarray<double>>> &kernels) {
        // First layer should not have an activation
        if (config.begin()->second != "none") {
            std::cerr << "ERROR (" << __func__ << ") : ";
            std::cerr
                << "First layer can't have activation other than none, got "
                << config.begin()->second;
            std::cerr << std::endl;
            std::exit(EXIT_FAILURE);
        }
        // Network should have at least two layers
        if (config.size() <= 1) {
            std::cerr << "ERROR (" << __func__ << ") : ";
            std::cerr << "Invalid size of network, ";
            std::cerr << "at least two layers are required";
            std::exit(EXIT_FAILURE);
        }
        // Reconstructing all pretrained layers
        for (size_t i = 0; i < config.size(); i++) {
            layers.emplace_back(neural_network::layers::DenseLayer(
                config[i].first, config[i].second, kernels[i]));
        }
        std::cout << "INFO: Network constructed successfully" << std::endl;
    }
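
    /**
     * Helper that runs a forward pass and returns the activations of every
     * layer (input included), as required by backpropagation during training.
     */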
    std::vector<std::vector<std::valarray<double>>>
    __detailed_single_prediction(const std::vector<std::valarray<double>> &X) {
        std::vector<std::vector<std::valarray<double>>> details;
        std::vector<std::valarray<double>> current_pass = X;
        details.emplace_back(X);
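        // Forward pass: at each layer compute x <- f(x * W) and record it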
        for (const auto &l : layers) {
            current_pass = multiply(current_pass, l.kernel);
            current_pass = apply_function(current_pass, l.activation_function);
            details.emplace_back(current_pass);
        }
        return details;
    }

 public:
    NeuralNetwork() = default;

    explicit NeuralNetwork(
        const std::vector<std::pair<int, std::string>> &config) {
        // First layer should not have an activation
        if (config.begin()->second != "none") {
            std::cerr << "ERROR (" << __func__ << ") : ";
            std::cerr
                << "First layer can't have activation other than none, got "
                << config.begin()->second;
            std::cerr << std::endl;
            std::exit(EXIT_FAILURE);
        }
        // Network should have at least two layers
        if (config.size() <= 1) {
            std::cerr << "ERROR (" << __func__ << ") : ";
            std::cerr << "Invalid size of network, ";
            std::cerr << "at least two layers are required";
            std::exit(EXIT_FAILURE);
        }
        // Separately creating the first layer so it can have a unit matrix
        // as kernel.
        layers.push_back(neural_network::layers::DenseLayer(
            config[0].first, config[0].second,
            {config[0].first, config[0].first}, false));
        // Creating remaining layers
        for (size_t i = 1; i < config.size(); i++) {
            layers.push_back(neural_network::layers::DenseLayer(
                config[i].first, config[i].second,
                {config[i - 1].first, config[i].first}, true));
        }
        std::cout << "INFO: Network constructed successfully" << std::endl;
    }

    NeuralNetwork(const NeuralNetwork &model) = default;

    ~NeuralNetwork() = default;

    NeuralNetwork &operator=(const NeuralNetwork &model) = default;

    NeuralNetwork(NeuralNetwork &&) = default;

    NeuralNetwork &operator=(NeuralNetwork &&) = default;

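    /**
     * Parse a CSV file into (sample, label) pairs. The label column (last or
     * first, per `last_label`) is one-hot encoded against the output layer's
     * size for classification, or kept as-is for single-value regression.
     * E.g. with a 3-neuron output layer, the row "5.1,3.5,1.4,0.2,2" becomes
     * x = {5.1, 3.5, 1.4, 0.2} and y = {0, 0, 1}.
     */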
    std::pair<std::vector<std::vector<std::valarray<double>>>,
              std::vector<std::vector<std::valarray<double>>>>
    get_XY_from_csv(const std::string &file_name, const bool &last_label,
                    const bool &normalize, const int &slip_lines = 1) {
        std::ifstream in_file;                          // Ifstream to read file
        in_file.open(file_name.c_str(), std::ios::in);  // Open file
        // If there is any problem in opening file
        if (!in_file.is_open()) {
            std::cerr << "ERROR (" << __func__ << ") : ";
            std::cerr << "Unable to open file: " << file_name << std::endl;
            std::exit(EXIT_FAILURE);
        }
        std::vector<std::vector<std::valarray<double>>> X,
            Y;             // To store X and Y
        std::string line;  // To store each line
        // Skip lines
        for (int i = 0; i < slip_lines; i++) {
            std::getline(in_file, line, '\n');  // Ignore line
        }
        // While file has information
        while (!in_file.eof() && std::getline(in_file, line, '\n')) {
            std::valarray<double> x_data,
                y_data;                  // To store single sample and label
            std::stringstream ss(line);  // Constructing stringstream from line
            std::string token;  // To store each token (separated by ',')
            while (std::getline(ss, token, ',')) {  // For each token
                // Insert numerical value of token in x_data
                x_data = insert_element(x_data, std::stod(token));
            }
            // If label is in last column
            if (last_label) {
                y_data.resize(this->layers.back().neurons);
                // If task is classification
                if (y_data.size() > 1) {
                    y_data[x_data[x_data.size() - 1]] = 1;
                }
                // If task is regression (of single value)
                else {
                    y_data[0] = x_data[x_data.size() - 1];
                }
                x_data = pop_back(x_data);  // Remove label from x_data
            } else {
                y_data.resize(this->layers.back().neurons);
                // If task is classification
                if (y_data.size() > 1) {
                    y_data[x_data[0]] = 1;
                }
                // If task is regression (of single value)
                else {
                    y_data[0] = x_data[0];
                }
                x_data = pop_front(x_data);  // Remove label from x_data
            }
            // Push collected x_data and y_data in X and Y
            X.push_back({x_data});
            Y.push_back({y_data});
        }
        // Normalize training data if flag is set
        if (normalize) {
            // Scale data between 0.01 and 1.0 using min-max scaler
            X = minmax_scaler(X, 0.01, 1.0);
        }
        in_file.close();         // Closing file
        return make_pair(X, Y);  // Return pair of X and Y
    }

    std::vector<std::valarray<double>> single_predict(
        const std::vector<std::valarray<double>> &X) {
        // Get activations of all layers
        auto activations = this->__detailed_single_prediction(X);
        // Return activations of last layer (actual predicted values)
        return activations.back();
    }

    std::vector<std::vector<std::valarray<double>>> batch_predict(
        const std::vector<std::vector<std::valarray<double>>> &X) {
        // Store predicted values
        std::vector<std::vector<std::valarray<double>>> predicted_batch(
            X.size());
        for (size_t i = 0; i < X.size(); i++) {  // For every sample
            // Push predicted values
            predicted_batch[i] = this->single_predict(X[i]);
        }
        return predicted_batch;  // Return predicted values
    }
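
    /**
     * Train the network with mini-batch style SGD: for every sample, run a
     * forward pass, accumulate MSE loss and argmax accuracy, backpropagate
     * the error, and update each kernel by learning_rate * gradient
     * (gradients are scaled by 1/batch_size as they are accumulated).
     */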
    void fit(const std::vector<std::vector<std::valarray<double>>> &X_,
             const std::vector<std::vector<std::valarray<double>>> &Y_,
             const int &epochs = 100, const double &learning_rate = 0.01,
             const size_t &batch_size = 32, const bool &shuffle = true) {
        std::vector<std::vector<std::valarray<double>>> X = X_, Y = Y_;
        // Both label and input data should have same size
        if (X.size() != Y.size()) {
            std::cerr << "ERROR (" << __func__ << ") : ";
            std::cerr << "X and Y in fit have different sizes" << std::endl;
            std::exit(EXIT_FAILURE);
        }
        std::cout << "INFO: Training Started" << std::endl;
        for (int epoch = 1; epoch <= epochs; epoch++) {  // For every epoch
            // Shuffle X and Y if flag is set
            if (shuffle) {
                equal_shuffle(X, Y);
            }
            auto start =
                std::chrono::high_resolution_clock::now();  // Start clock
            double loss = 0,
                   acc = 0;  // Initialize performance metrics with zero
            // For each starting index of batch
            for (size_t batch_start = 0; batch_start < X.size();
                 batch_start += batch_size) {
                for (size_t i = batch_start;
                     i < std::min(X.size(), batch_start + batch_size); i++) {
                    std::vector<std::valarray<double>> grad, cur_error,
                        predicted;
                    auto activations = this->__detailed_single_prediction(X[i]);
                    // Gradients vector to store gradients for all layers
                    // They will be averaged and applied to kernel
                    std::vector<std::vector<std::valarray<double>>> gradients;
                    gradients.resize(this->layers.size());
                    // First initialize gradients to zero
                    for (size_t j = 0; j < gradients.size(); j++) {
                        zeroes_initialization(
                            gradients[j], get_shape(this->layers[j].kernel));
                    }
                    predicted = activations.back();  // Predicted vector
                    cur_error = predicted - Y[i];    // Error (predicted - expected)
                    // Calculating loss with MSE
                    loss += sum(apply_function(
                        cur_error, neural_network::util_functions::square));
                    // If prediction is correct
                    if (argmax(predicted) == argmax(Y[i])) {
                        acc += 1;
                    }
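                    // Backpropagation: for layer j, with E the error w.r.t.
                    // its output and f' the activation derivative applied to
                    // the stored activation:
                    //   delta  = E (.) f'(activations[j + 1])   (Hadamard)
                    //   grad_j = activations[j]^T * delta
                    //   E_prev = delta * kernel_j^T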
                    // For every layer (except the first), starting from the
                    // last one
                    for (size_t j = this->layers.size() - 1; j >= 1; j--) {
                        // Backpropagating errors
                        cur_error = hadamard_product(
                            cur_error,
                            apply_function(
                                activations[j + 1],
                                this->layers[j].dactivation_function));
                        // Calculating gradient for current layer
                        grad = multiply(transpose(activations[j]), cur_error);
                        // Change error according to current kernel values
                        cur_error = multiply(
                            cur_error, transpose(this->layers[j].kernel));
                        // Adding gradient values to collection of gradients
                        gradients[j] = gradients[j] + grad / double(batch_size);
                    }
                    // Applying gradients
                    for (size_t j = this->layers.size() - 1; j >= 1; j--) {
                        // Updating kernel (aka weights)
                        this->layers[j].kernel = this->layers[j].kernel -
                                                 gradients[j] * learning_rate;
                    }
                }
            }
            auto stop =
                std::chrono::high_resolution_clock::now();  // Stop the clock
            // Calculate time taken by epoch
            auto duration =
                std::chrono::duration_cast<std::chrono::microseconds>(stop -
                                                                      start);
            loss /= X.size();        // Averaging loss
            acc /= X.size();         // Averaging accuracy
            std::cout.precision(4);  // Set output precision to 4
            // Printing training stats
            std::cout << "Training: Epoch " << epoch << '/' << epochs;
            std::cout << ", Loss: " << loss;
            std::cout << ", Accuracy: " << acc;
            std::cout << ", Taken time: " << duration.count() / 1e6
                      << " seconds";
            std::cout << std::endl;
        }
        return;
    }

    void fit_from_csv(const std::string &file_name, const bool &last_label,
                      const int &epochs, const double &learning_rate,
                      const bool &normalize, const int &slip_lines = 1,
                      const size_t &batch_size = 32,
                      const bool &shuffle = true) {
        // Getting training data from csv file
        auto data =
            this->get_XY_from_csv(file_name, last_label, normalize, slip_lines);
        // Fit the model on training data
        this->fit(data.first, data.second, epochs, learning_rate, batch_size,
                  shuffle);
        return;
    }

    void evaluate(const std::vector<std::vector<std::valarray<double>>> &X,
                  const std::vector<std::vector<std::valarray<double>>> &Y) {
        std::cout << "INFO: Evaluation Started" << std::endl;
        double acc = 0, loss = 0;  // Initialize performance metrics with zero
        for (size_t i = 0; i < X.size(); i++) {  // For every sample in input
            // Get predictions
            std::vector<std::valarray<double>> pred =
                this->single_predict(X[i]);
            // If predicted class is correct
            if (argmax(pred) == argmax(Y[i])) {
                acc += 1;  // Increment accuracy
            }
            // Calculating loss - Mean Squared Error
            loss += sum(apply_function((Y[i] - pred),
                                       neural_network::util_functions::square) *
                        0.5);
        }
        acc /= X.size();   // Averaging accuracy
        loss /= X.size();  // Averaging loss
        // Printing performance of the model
        std::cout << "Evaluation: Loss: " << loss;
        std::cout << ", Accuracy: " << acc << std::endl;
        return;
    }

    void evaluate_from_csv(const std::string &file_name, const bool &last_label,
                           const bool &normalize, const int &slip_lines = 1) {
        // Getting evaluation data from csv file
        auto data =
            this->get_XY_from_csv(file_name, last_label, normalize, slip_lines);
        // Evaluating model
        this->evaluate(data.first, data.second);
        return;
    }

    void save_model(const std::string &_file_name) {
        std::string file_name = _file_name;
        // Adding ".model" extension if it is not already there in name
        if (file_name.find(".model") == file_name.npos) {
            file_name += ".model";
        }
        std::ofstream out_file;  // Ofstream to write in file
        // Open file in out|trunc mode
        out_file.open(file_name.c_str(),
                      std::ofstream::out | std::ofstream::trunc);
        // If there is any problem in opening file
        if (!out_file.is_open()) {
            std::cerr << "ERROR (" << __func__ << ") : ";
            std::cerr << "Unable to open file: " << file_name << std::endl;
            std::exit(EXIT_FAILURE);
        }
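        /*
            Plain-text format in which the model is saved
            (mirrored by load_model):

            total_layers
            neurons(1st layer) activation(1st layer)
            kernel_shape(1st layer)
            kernel_values (row by row)
            ...
            neurons(Nth layer) activation(Nth layer)
            kernel_shape(Nth layer)
            kernel_values (row by row)
        */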
        // Saving model in the above format
        out_file << layers.size();
        out_file << std::endl;
        for (const auto &layer : this->layers) {
            out_file << layer.neurons << ' ' << layer.activation << std::endl;
            const auto shape = get_shape(layer.kernel);
            out_file << shape.first << ' ' << shape.second << std::endl;
            for (const auto &row : layer.kernel) {
                for (const auto &val : row) {
                    out_file << val << ' ';
                }
                out_file << std::endl;
            }
        }
        std::cout << "INFO: Model saved successfully with name : ";
        std::cout << file_name << std::endl;
        out_file.close();  // Closing file
        return;
    }

    NeuralNetwork load_model(const std::string &file_name) {
        std::ifstream in_file;            // Ifstream to read file
        in_file.open(file_name.c_str());  // Opening file
        // If there is any problem in opening file
        if (!in_file.is_open()) {
            std::cerr << "ERROR (" << __func__ << ") : ";
            std::cerr << "Unable to open file: " << file_name << std::endl;
            std::exit(EXIT_FAILURE);
        }
        std::vector<std::pair<int, std::string>> config;  // To store config
        std::vector<std::vector<std::valarray<double>>>
            kernels;  // To store pretrained kernels
        // Loading model from saved file format
        size_t total_layers = 0;
        in_file >> total_layers;
        for (size_t i = 0; i < total_layers; i++) {
            int neurons = 0;
            std::string activation;
            size_t shape_a = 0, shape_b = 0;
            std::vector<std::valarray<double>> kernel;
            in_file >> neurons >> activation >> shape_a >> shape_b;
            for (size_t r = 0; r < shape_a; r++) {
                std::valarray<double> row(shape_b);
                for (size_t c = 0; c < shape_b; c++) {
                    in_file >> row[c];
                }
                kernel.push_back(row);
            }
            config.emplace_back(make_pair(neurons, activation));
            kernels.emplace_back(kernel);
        }
        std::cout << "INFO: Model loaded successfully" << std::endl;
        in_file.close();  // Closing file
        return NeuralNetwork(
            config, kernels);  // Return instance of NeuralNetwork class
    }

    void summary() {
        // Printing summary
        std::cout
            << "==============================================================="
            << std::endl;
        std::cout << "\t\t+ MODEL SUMMARY +\t\t\n";
        std::cout
            << "==============================================================="
            << std::endl;
        for (size_t i = 1; i <= layers.size(); i++) {  // For every layer
            std::cout << i << ")";
            std::cout << " Neurons : "
                      << layers[i - 1].neurons;  // number of neurons
            std::cout << ", Activation : "
                      << layers[i - 1].activation;  // activation
            std::cout << ", Kernel shape : "
                      << get_shape(layers[i - 1].kernel);  // kernel shape
            std::cout << std::endl;
        }
        std::cout
            << "==============================================================="
            << std::endl;
        return;
    }
};
}  // namespace neural_network
}  // namespace machine_learning

/**
 * @brief Test function for the neural network
 */
static void test() {
    // Creating network with 3 layers for "iris.csv"
    machine_learning::neural_network::NeuralNetwork myNN =
        machine_learning::neural_network::NeuralNetwork({
            {4, "none"},  // First layer with 4 neurons and "none" as activation
            {6, "relu"},  // Second layer with 6 neurons and "relu" as activation
            {3, "sigmoid"}  // Third layer with 3 neurons and "sigmoid" as
                            // activation
        });
    // Printing summary of model
    myNN.summary();
    // Training Model
    myNN.fit_from_csv("iris.csv", true, 100, 0.3, false, 2, 32, true);
    // Testing predictions of model (expected iris class indices)
    assert(machine_learning::argmax(
               myNN.single_predict({{5, 3.4, 1.6, 0.4}})) == 0);
    assert(machine_learning::argmax(
               myNN.single_predict({{6.4, 2.9, 4.3, 1.3}})) == 1);
    assert(machine_learning::argmax(
               myNN.single_predict({{6.2, 3.4, 5.4, 2.3}})) == 2);
    return;
}

/**
 * @brief Main function
 * @returns 0 on exit
 */
int main() {
    // Testing
    test();
    return 0;
}
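
// A minimal usage sketch, assuming "vector_ops.hpp" sits next to this file
// and an "iris.csv" in the format the test expects (two header lines to skip,
// class index in the last column) is in the working directory:
//
//   g++ -std=c++11 neural_network.cpp -o neural_network && ./neural_network
//
// This builds the 4-6-3 network above, trains it for 100 epochs, and asserts
// the predicted class index for three known samples.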