From 8d88700a660959372feebb01b5afc377ba3d9c31 Mon Sep 17 00:00:00 2001
From: Jakhes <dean.schmitz@schmitzbauer.de>
Date: Tue, 11 Oct 2022 19:55:12 +0200
Subject: [PATCH] Update adaboost and fully add tests

---
 src/methods/ada_boost/ada_boost.cpp           | 204 -----------
 src/methods/ada_boost/ada_boost_test.pl       |  38 ---
 src/methods/{ada_boost => adaboost}/Makefile  |   2 +-
 src/methods/adaboost/adaboost.cpp             | 314 +++++++++++++++++
 .../ada_boost.pl => adaboost/adaboost.pl}     |  45 ++-
 src/methods/adaboost/adaboost_test.pl         | 321 ++++++++++++++++++
 6 files changed, 669 insertions(+), 255 deletions(-)
 delete mode 100644 src/methods/ada_boost/ada_boost.cpp
 delete mode 100644 src/methods/ada_boost/ada_boost_test.pl
 rename src/methods/{ada_boost => adaboost}/Makefile (92%)
 create mode 100644 src/methods/adaboost/adaboost.cpp
 rename src/methods/{ada_boost/ada_boost.pl => adaboost/adaboost.pl} (64%)
 create mode 100644 src/methods/adaboost/adaboost_test.pl

diff --git a/src/methods/ada_boost/ada_boost.cpp b/src/methods/ada_boost/ada_boost.cpp
deleted file mode 100644
index 7b2fad5..0000000
--- a/src/methods/ada_boost/ada_boost.cpp
+++ /dev/null
@@ -1,204 +0,0 @@
-#include <sicstus/sicstus.h>
-/* ex_glue.h is generated by splfr from the foreign/[2,3] facts.
-   Always include the glue header in your foreign resource code.
-*/
-#include "ada_boost_glue.h"
-#include <mlpack/methods/adaboost/adaboost.hpp>
-#include <mlpack/core.hpp>
-
-// including helper functions for converting between arma structures and arrays
-#include "../../helper_files/helper.hpp"
-
-// some of the most used namespaces
-using namespace arma;
-using namespace mlpack;
-using namespace std;
-using namespace mlpack::adaboost;
-
-using namespace mlpack::tree;
-using namespace mlpack::perceptron;
-using namespace mlpack::util;
-
-// Global Variable of the GlobalMethodObject object so it can be accessed from all functions
-AdaBoost<> adaBoostPerceptron;
-AdaBoost<DecisionTree<>> adaBoostDecisionStump;
-
-bool usingPerceptron = true;
-
-
-
-
-// TODO: 
-// input: 	const MatType & 				data, 
-//			const arma::Row< size_t > & 	labels,
-//			const size_t 					numClasses,
-//			const WeakLearnerType & 		other,
-//			const size_t 					iterations = 100,
-//			const double 					tolerance = 1e-6
-// output: 
-// description: 
-void initModelWithTraining(float *dataArr, SP_integer dataSize, SP_integer dataRowNum, float *labelsArr, SP_integer labelSize, SP_integer numClasses, char const *learner,SP_integer iterations, double tolerance)
-{
-	// convert the Prolog arrays to arma::mat
-	mat data = convertArrayToMat(dataArr, dataSize, dataRowNum);
-	// convert the Prolog arrays to arma::rowvec
-	rowvec labels = convertArrayToRowvec(labelsArr, labelSize);
-	Row<size_t> convLabels = conv_to<Row<size_t>>::from(labels);
-
-	if(strcmp(learner, "perceptron") == 0)
-	{
-		cout << "perceptron" << endl;
-		usingPerceptron = true;
-		adaBoostPerceptron = AdaBoost<Perceptron<>>(data, convLabels, numClasses, iterations, tolerance);
-	}
-	else if (strcmp(learner, "decision_stump") == 0)
-	{
-		cout << "decision_stump" << endl;
-		usingPerceptron = false;
-		adaBoostDecisionStump = AdaBoost<DecisionStump<>>(data, convLabels, numClasses, iterations, tolerance);
-	}
-	else cout << "wrong input" << endl;
-}
-
-// TODO: 
-// input: 	const double tolerance = 1e-6	
-// output: 
-// description: 
-void initModelNoTraining(double tol = 1e-6, char const *learner = "perceptron")
-{
-	if(strcmp(learner, "perceptron") == 0) 
-	{
-		cout << "perceptron" << endl;
-		usingPerceptron = true;
-		adaBoostPerceptron = AdaBoost<Perceptron<>>(tol);
-	}
-	else if (strcmp(learner, "decision_stump") == 0)
-	{
-		cout << "decision_stump" << endl;
-		usingPerceptron = false;
-		adaBoostDecisionStump = AdaBoost<DecisionStump<>>(tol);
-	}
-	else cout << "wrong input" << endl;
-}
-
-// TODO: 
-// input: 	const MatType & 		test,
-//			arma::Row< size_t > & 	predictedLabels,
-//			arma::mat & 			probabilities
-// output: 
-// description: 
-void classify(float *testMatArr, SP_integer testMatSize, SP_integer testMatRowNum, float **predLabelsArr, SP_integer *predLabelsArrSize, float **probMatArr, SP_integer *probMatColNum, SP_integer *probMatRowNum)
-{
-	// convert the Prolog arrays to arma::mat
-	mat data = convertArrayToMat(testMatArr, testMatSize, testMatRowNum);
-
-	// get the ReturnVector get the ReturnMat
-	Row< size_t > predLabelsVector;
-	mat probMat;
-
-	if(usingPerceptron) 
-	{
-		adaBoostPerceptron.Classify(data, predLabelsVector, probMat);
-	}
-	else
-	{
-		adaBoostDecisionStump.Classify(data, predLabelsVector, probMat);
-	}
-
-	vec convPredLabels = conv_to<vec>::from(predLabelsVector);
-
-	// return the Vector lenght
-	*predLabelsArrSize = convPredLabels.n_elem;
-
-	// return the Vector as Array
-	*predLabelsArr = convertToArray(convPredLabels);
-
-	// return the Matrix dimensions
-	*probMatColNum = probMat.n_cols;
-	*probMatRowNum = probMat.n_rows;
-
-	// return the Matrix as one long Array
-	*probMatArr = convertToArray(probMat);
-}
-
-// TODO: 
-// input: 
-// output: 	size_t 	classesNum
-// description: 
-SP_integer numClasses()
-{
-	if(usingPerceptron) 
-	{
-		return adaBoostPerceptron.NumClasses();
-	}
-	else
-	{
-		return adaBoostDecisionStump.NumClasses();
-	}
-}
-
-// TODO: 
-// input: 
-// output: 	double 	tol
-// description: 
-double getTolerance()
-{
-	if(usingPerceptron) 
-	{
-		return adaBoostPerceptron.Tolerance();
-	}
-	else
-	{
-		return adaBoostDecisionStump.Tolerance();
-	}
-}
-
-// TODO: 
-// input: 
-// output: 	double&	tol
-// description: 
-void modifyTolerance(double newTol)
-{
-	if(usingPerceptron) 
-	{
-		double& tol = adaBoostPerceptron.Tolerance();
-		tol = newTol;
-	}
-	else
-	{
-		double& tol = adaBoostDecisionStump.Tolerance();
-		tol = newTol;
-	}
-}
-
-// TODO: 
-// input: 	const MatType & 				data,
-//			const arma::Row< size_t > & 	labels,
-//			const size_t 					numClasses,
-//			const WeakLearnerType & 		learner,
-//			const size_t 					iterations = 100,
-//			const double 					tolerance = 1e-6
-//
-// output: 	double upper bound training error
-// description: 
-double train(float *dataArr, SP_integer dataSize, SP_integer dataRowNum, float *labelsArr, SP_integer labelSize, SP_integer numClasses, char const *learner,SP_integer iterations, double tolerance)
-{
-	// convert the Prolog arrays to arma::mat
-	mat data = convertArrayToMat(dataArr, dataSize, dataRowNum);
-	// convert the Prolog arrays to arma::rowvec
-	rowvec labels = convertArrayToRowvec(labelsArr, labelSize);
-	Row<size_t> convLabels = conv_to<Row<size_t>>::from(labels);
-
-	if(strcmp(learner, "perceptron") == 0 && usingPerceptron) 
-	{
-		cout << "perceptron" << endl;
-		return adaBoostPerceptron.Train(data, convLabels, numClasses, Perceptron(), iterations, tolerance);
-	}
-	else if (strcmp(learner, "decision_stump") == 0 && !usingPerceptron)
-	{
-		cout << "decision_stump" << endl;
-		return adaBoostDecisionStump.Train(data, convLabels, numClasses, DecisionStump<>(), iterations, tolerance);
-	}
-	else cout << "wrong input" << endl;
-	return 0;
-}
diff --git a/src/methods/ada_boost/ada_boost_test.pl b/src/methods/ada_boost/ada_boost_test.pl
deleted file mode 100644
index ab199e7..0000000
--- a/src/methods/ada_boost/ada_boost_test.pl
+++ /dev/null
@@ -1,38 +0,0 @@
-:- use_module(library(plunit)).
-
-:- use_module(ada_boost).
-:- use_module('../../helper_files/helper.pl').
-
-reset_Model :-
-        initModelNoTraining(0.0001, perceptron).
-
-:- begin_tests(lists).
-
-%% alpha tests
-test(tol_after_train, [true(A =:= 0.0001), true(B =:= 1)]) :-
-        reset_Model,
-        convert_list_to_float_array([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5],3, array(Xsize, Xrownum, X)),
-        convert_list_to_float_array([0.2,0.2,0.2,0.2], array(Ysize, Y)),
-        train(X,Xsize, Xrownum,Y, Ysize, 2, perceptron, 50, 0.0001, B),
-        getTolerance(A).
-
-test(tol_after_train2, [true(A =:= 0.0001), true(B =:= 1)]) :-
-        reset_Model,
-        convert_list_to_float_array([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5],6, array(Xsize, Xrownum, X)),
-        convert_list_to_float_array([1,2], array(Ysize, Y)),
-        train(X,Xsize, Xrownum,Y, Ysize, 2, perceptron, 50, 0.0001, B),
-        getTolerance(A).
-
-test(tol_after_train_fail, [true(B =:= 1)]) :-
-        reset_Model,
-        convert_list_to_float_array([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5],2, array(Xsize, Xrownum, X)),
-        convert_list_to_float_array([1,2,2,2,1,1], array(Ysize, Y)),
-        train(X,Xsize, Xrownum,Y, Ysize, 2, perceptron, 50, 0.0001, B),
-        classifyNoProb(X,Xsize, Xrownum, NewY, NewYsize),
-        print(Ysize),
-        print(NewYsize),
-        convert_float_array_to_list(NewY, NewYsize, Results),
-        print([1,2,2,2,1,1]),
-        print(Results).
-
-:- end_tests(lists).
\ No newline at end of file
diff --git a/src/methods/ada_boost/Makefile b/src/methods/adaboost/Makefile
similarity index 92%
rename from src/methods/ada_boost/Makefile
rename to src/methods/adaboost/Makefile
index 537b732..8eda7ce 100644
--- a/src/methods/ada_boost/Makefile
+++ b/src/methods/adaboost/Makefile
@@ -1,6 +1,6 @@
 splfr=/usr/local/sicstus4.7.1/bin/splfr
 
-METHOD_NAME=ada_boost
+METHOD_NAME=adaboost
 
 $(METHOD_NAME).so: $(METHOD_NAME).pl $(METHOD_NAME).cpp
 	$(splfr) -larmadillo -fopenmp -lmlpack -lstdc++ -cxx --struct $(METHOD_NAME).pl $(METHOD_NAME).cpp ../../helper_files/helper.cpp
diff --git a/src/methods/adaboost/adaboost.cpp b/src/methods/adaboost/adaboost.cpp
new file mode 100644
index 0000000..e5c24eb
--- /dev/null
+++ b/src/methods/adaboost/adaboost.cpp
@@ -0,0 +1,314 @@
+#include <sicstus/sicstus.h>
+/* ex_glue.h is generated by splfr from the foreign/[2,3] facts.
+   Always include the glue header in your foreign resource code.
+*/
+#include "adaboost_glue.h"
+#include <mlpack/methods/adaboost/adaboost.hpp>
+#include <mlpack/core.hpp>
+
+// including helper functions for converting between arma structures and arrays
+#include "../../helper_files/helper.hpp"
+
+// some of the most used namespaces
+using namespace arma;
+using namespace mlpack;
+using namespace std;
+using namespace mlpack::adaboost;
+
+using namespace mlpack::tree;
+using namespace mlpack::perceptron;
+using namespace mlpack::util;
+
+// Global Variable of the GlobalMethodObject object so it can be accessed from all functions
+AdaBoost<> adaBoostPerceptron;
+AdaBoost<DecisionTree<>> adaBoostDecisionStump;
+
+bool usingPerceptron = true;
+bool isPerceptronTrained = false;
+bool isDecisionStumpTrained = false;
+
+
+// input: 	const MatType & 				data, 
+//			const arma::Row< size_t > & 	labels,
+//			const size_t 					numClasses,
+//			const WeakLearnerType & 		other,
+//			const size_t 					iterations = 100,
+//			const double 					tolerance = 1e-6
+// output: 
+// description: 
+//			Needs to be called first before all other predicates except initModelNoTraining!
+//			Initiates the AdaBoost model and trains it, so classify can be used immediately.
+//
+void initModelWithTraining(float *dataMatArr, SP_integer dataMatSize, SP_integer dataMatRowNum, 
+							float *labelsArr, SP_integer labelsArrSize, 
+							SP_integer numClasses, 
+							char const *learner, 
+							SP_integer iterations, double tolerance)
+{
+	// convert the Prolog array to arma::mat
+	mat data = convertArrayToMat(dataMatArr, dataMatSize, dataMatRowNum);
+	// convert the Prolog array to arma::rowvec
+	Row< size_t > labelsVector = convertArrayToVec(labelsArr, labelsArrSize);
+
+	if(strcmp(learner, "perceptron") == 0)
+	{
+		usingPerceptron = true;
+		adaBoostPerceptron = AdaBoost<Perceptron<>>(data, labelsVector, numClasses, perceptron::Perceptron<>(numClasses, 0UL, iterations), iterations, tolerance);
+		isPerceptronTrained = true;
+	}
+	else if (strcmp(learner, "decision_stump") == 0)
+	{
+		usingPerceptron = false;
+		adaBoostDecisionStump = AdaBoost<DecisionTree<>>(data, labelsVector, numClasses, tree::DecisionTree<>(data,labelsVector, numClasses), iterations, tolerance);
+		isDecisionStumpTrained = true;
+	}
+	else
+	{
+		raisePrologDomainExeption(learner, 2, "perceptron or decision_stump", "initModelWithTraining");
+	}
+}
+
+
+// input: 	const double tolerance = 1e-6	
+// output: 
+// description: 
+//			Needs to be called first before all other predicates except initModelWithTraining!
+//			Initiates the AdaBoost model but doesn't train it, so train has to be used first before classify can be used.
+//
+void initModelNoTraining(double tol = 1e-6, 
+							char const *learner = "perceptron")
+{
+	if(tol <= 0)
+	{
+		raisePrologDomainExeption(tol, 1, ">0", "initModelNoTraining");
+	}
+
+	if(strcmp(learner, "perceptron") == 0) 
+	{
+		usingPerceptron = true;
+		adaBoostPerceptron = AdaBoost<Perceptron<>>(tol);
+		isPerceptronTrained = false;
+	}
+	else if (strcmp(learner, "decision_stump") == 0)
+	{
+		usingPerceptron = false;
+		adaBoostDecisionStump = AdaBoost<DecisionStump<>>(tol);
+		isDecisionStumpTrained = false;
+	}
+	else
+	{
+		raisePrologDomainExeption(learner, 2, "perceptron or decision_stump", "initModelNoTraining");
+	}
+}
+
+
+// input: 	const MatType & 		test,
+//			arma::Row< size_t > & 	predictedLabels,
+//			arma::mat & 			probabilities
+// output: 
+// description: 
+//			Classifies the given data into the number of classes the model was trained for.
+//
+void classify(float *testMatArr, SP_integer testMatSize, SP_integer testMatRowNum, 
+				float **predicLabelsArr, SP_integer *predicLabelsArrSize, 
+				float **probsMatArr, SP_integer *probsMatColNum, SP_integer *probsMatRowNum)
+{
+	// convert the Prolog array to arma::mat
+	mat test = convertArrayToMat(testMatArr, testMatSize, testMatRowNum);
+
+	// create the ReturnVector
+	Row< size_t > predicLabelsReturnVector;
+	// create the ReturnMat
+	mat probsReturnMat;
+
+
+	// run the classify function on the model depending on if perceptron was selected
+	if(usingPerceptron && isPerceptronTrained)
+	{
+		try
+		{
+			adaBoostPerceptron.Classify(test, predicLabelsReturnVector, probsReturnMat);
+		}
+		catch(const std::exception& e)
+		{
+			std::cerr << e.what() << '\n';
+			raisePrologSystemExeption("The given data matrix has incorrect dimensions compared to the training data!");
+			return;
+		}
+		
+		
+	}
+	else if(!usingPerceptron && isDecisionStumpTrained)
+	{
+		try
+		{
+			adaBoostDecisionStump.Classify(test, predicLabelsReturnVector, probsReturnMat);
+		}
+		catch(const std::exception& e)
+		{
+			std::cerr << e.what() << '\n';
+			raisePrologSystemExeption("The given data matrix has incorrect dimensions compared to the training data!");
+			return;
+		}
+	}
+	else
+	{
+		// raise an exeption when classify gets called before the model is trained
+		raisePrologSystemExeption("The model is not trained!");
+		return;
+	}
+
+	// check if the return vectors have actual numbers stored
+	if (isnan(probsReturnMat[0]) || isnan(predicLabelsReturnVector[0]))
+	{
+		raisePrologSystemExeption("The given data matrix has incorrect dimensions compared to the training data!");
+		return;
+	}
+
+	// return the Vector
+	returnVectorInformation(predicLabelsReturnVector, predicLabelsArr, predicLabelsArrSize);
+	/// return the Matrix
+	returnMatrixInformation(probsReturnMat, probsMatArr, probsMatColNum, probsMatRowNum);
+}
+
+
+// input: 
+// output: 	size_t 	classesNum
+// description: 
+//			Returns the amount of classes defined in the model for classification.
+//
+SP_integer numClasses()
+{
+	if(usingPerceptron) 
+	{
+		return adaBoostPerceptron.NumClasses();
+	}
+	else
+	{
+		return adaBoostDecisionStump.NumClasses();
+	}
+}
+
+
+// input: 
+// output: 	double 	tol
+// description: 
+//			Returns the tolerance of the model.
+//
+double getTolerance()
+{
+	if(usingPerceptron) 
+	{
+		return adaBoostPerceptron.Tolerance();
+	}
+	else
+	{
+		return adaBoostDecisionStump.Tolerance();
+	}
+}
+
+
+// input: 
+// output: 	double&	tol
+// description: 
+//			Modifies the tolerance of the model.
+//
+void modifyTolerance(double newTol)
+{
+
+	if(newTol <= 0)
+	{
+		raisePrologDomainExeption(newTol, 1, ">0", "modifyTolerance");
+	}
+	if(usingPerceptron) 
+	{
+		double& tol = adaBoostPerceptron.Tolerance();
+		tol = newTol;
+	}
+	else
+	{
+		double& tol = adaBoostDecisionStump.Tolerance();
+		tol = newTol;
+	}
+}
+
+
+// input: 	const MatType & 				data,
+//			const arma::Row< size_t > & 	labels,
+//			const size_t 					numClasses,
+//			const WeakLearnerType & 		learner,
+//			const size_t 					iterations = 100,
+//			const double 					tolerance = 1e-6
+//
+// output: 	double upper bound training error
+// description: 
+//			Trains the model with the given data.
+//
+double train(float *dataMatArr, SP_integer dataMatSize, SP_integer dataMatRowNum, 
+				float *labelsArr, SP_integer labelsArrSize, 
+				SP_integer numClasses, 
+				char const *learner, 
+				SP_integer iterations, double tolerance)
+{
+	// convert the Prolog array to arma::mat
+	mat data = convertArrayToMat(dataMatArr, dataMatSize, dataMatRowNum);
+	// convert the Prolog array to arma::rowvec
+	Row< size_t > labelsVector = convertArrayToVec(labelsArr, labelsArrSize);
+
+
+	// run the train function on the model given what learner was selected
+	if(strcmp(learner, "perceptron") == 0 && usingPerceptron) 
+	{
+		try
+		{
+			isPerceptronTrained = true;
+			return adaBoostPerceptron.Train(data, labelsVector, numClasses, Perceptron(), iterations, tolerance);
+		}
+		catch(const std::exception& e)
+		{
+			if (strcmp(e.what(),"Mat::col(): index out of bounds") == 0)
+			{
+				raisePrologSystemExeption("The values of the Label have to start at 0 and be >= 0 and < the given numClass!");
+			}
+			else if (strcmp(e.what(),"Row::subvec(): indices out of bounds or incorrectly used") == 0)
+			{
+				raisePrologSystemExeption("The given Labels Vector is too short!");
+			}
+			else
+			{
+				raisePrologSystemExeption(e.what());
+			}
+			return 0;
+		}
+		
+	}
+	else if (strcmp(learner, "decision_stump") == 0 && !usingPerceptron)
+	{
+		try
+		{
+			isDecisionStumpTrained = true;
+			return adaBoostDecisionStump.Train(data, labelsVector, numClasses, DecisionStump<>(), iterations, tolerance);
+		}
+		catch(const std::exception& e)
+		{
+			if (strcmp(e.what(),"Mat::col(): index out of bounds") == 0)
+			{
+				raisePrologSystemExeption("The values of the Label have to start at 0 and be >= 0 and < the given numClass!");
+			}
+			else if (strcmp(e.what(),"Row::subvec(): indices out of bounds or incorrectly used") == 0)
+			{
+				raisePrologSystemExeption("The given Labels Vector is too short!");
+			}
+			else
+			{
+				raisePrologSystemExeption(e.what());
+			}
+			return 0;
+		}
+	}
+	else
+	{
+		raisePrologDomainExeption(learner, 5, "perceptron or decision_stump", "train");
+	}
+	return 0;
+}
diff --git a/src/methods/ada_boost/ada_boost.pl b/src/methods/adaboost/adaboost.pl
similarity index 64%
rename from src/methods/ada_boost/ada_boost.pl
rename to src/methods/adaboost/adaboost.pl
index 3d9e822..6ed7e43 100644
--- a/src/methods/ada_boost/ada_boost.pl
+++ b/src/methods/adaboost/adaboost.pl
@@ -1,11 +1,11 @@
 
-:- module(ada_boost, [  initModelWithTraining/9, 
+:- module(adaboost, [   initModelWithTraining/7,
                         initModelNoTraining/2, 
-                        classify/8, 
+                        classify/5, 
                         numClasses/1, 
                         getTolerance/1, 
                         modifyTolerance/1, 
-                        train/10]).
+                        train/8]).
 
 %% requirements of library(struct)
 :- load_files(library(str_decl),
@@ -27,7 +27,7 @@
 %%            mat    data,
 %%            vec    labels,
 %%            int    numClasses,
-%%            string learner       => “decision_stump”, “perceptron”,
+%%            string learner       => "decision_stump", "perceptron",
 %%            int    iterations    => 100,
 %%            float  tolerance     => 1e-6
 %%
@@ -44,16 +44,23 @@ foreign(initModelWithTraining, c, initModelWithTraining(+pointer(float_array), +
                                                         +string, 
                                                         +integer , +float32)).
 
+initModelWithTraining(MatList, MatRows, VecList, NumClasses, Learner, Iterations, Tolerance) :-
+        NumClasses >= 0,
+        Iterations >= 0,
+        Tolerance >= 0,
+        convert_list_to_float_array(MatList, MatRows, array(Xsize, Xrownum, X)),
+        convert_list_to_float_array(VecList, array(Ysize, Y)),
+        initModelWithTraining(X, Xsize, Xrownum, Y, Ysize, NumClasses, Learner, Iterations, Tolerance).
 
 %% --Input--
-%%            string learner       => “decision_stump”, “perceptron”,
+%%            string learner       => "decision_stump", "perceptron",
 %%            float  tolerance     => 1e-6
 %%
 %% --Output--
 %%
 %% --Description--
 %%            Needs to be called first before all other predicates exept initModelWithTraining!
-%%            Initiates the Adaboostmodel but doesn’t train it, so train has to be used first before classify can be used.
+%%            Initiates the AdaBoost model but doesn't train it, so train has to be used first before classify can be used.
 %%
 foreign(initModelNoTraining, c, initModelNoTraining(+float32, +string)).
 
@@ -68,9 +75,15 @@ foreign(initModelNoTraining, c, initModelNoTraining(+float32, +string)).
 %% --Description--
 %%            Classifies the given data into the number of classes the model was trained for.
 %%
-foreign(classify, c, classify(     +pointer(float_array), +integer, +integer, 
-                                   -pointer(float_array), -integer, 
-                                   -pointer(float_array), -integer, -integer)).
+foreign(classify, c, classify(  +pointer(float_array), +integer, +integer, 
+                                -pointer(float_array), -integer, 
+                                -pointer(float_array), -integer, -integer)).
+
+classify(TestList, TestRows, PredicList, ProbsList, ZRows) :-
+        convert_list_to_float_array(TestList, TestRows, array(Xsize, Xrownum, X)),
+        classify(X, Xsize, Xrownum, Y, Ysize, Z, ZCols, ZRows),
+        convert_float_array_to_list(Y, Ysize, PredicList),
+        convert_float_array_to_2d_list(Z, ZCols, ZRows, ProbsList).
 
 
 %% --Input--
@@ -110,7 +123,7 @@ foreign(modifyTolerance, c, modifyTolerance(+float32)).
 %%            mat    data,
 %%            vec    labels,
 %%            int    numClasses,
-%%            string learner       => “decision_stump”, “perceptron”,
+%%            string learner       => "decision_stump", "perceptron",
 %%            int    iterations    => 100,
 %%            float  tolerance     => 1e-6
 %%
@@ -125,9 +138,17 @@ foreign(train, c, train(    +pointer(float_array), +integer, +integer,
                             +integer , +float32, 
                             [-float32])).
 
+train(MatList, MatRows, VecList, NumClasses, Learner, Iterations, Tolerance, Error) :-
+        NumClasses >= 0,
+        Iterations >= 0,
+        Tolerance >= 0,
+        convert_list_to_float_array(MatList, MatRows, array(Xsize, Xrownum, X)),
+        convert_list_to_float_array(VecList, array(Ysize, Y)),
+        train(X, Xsize, Xrownum, Y, Ysize, NumClasses, Learner, Iterations, Tolerance, Error).
+
 
 %% Defines the functions that get connected from main.cpp
-foreign_resource(ada_boost, [   initModelWithTraining, 
+foreign_resource(adaboost, [    initModelWithTraining, 
                                 initModelNoTraining, 
                                 classify, 
                                 numClasses, 
@@ -135,4 +156,4 @@ foreign_resource(ada_boost, [   initModelWithTraining,
                                 modifyTolerance, 
                                 train]).
 
-:- load_foreign_resource(ada_boost).
+:- load_foreign_resource(adaboost).
diff --git a/src/methods/adaboost/adaboost_test.pl b/src/methods/adaboost/adaboost_test.pl
new file mode 100644
index 0000000..4154b50
--- /dev/null
+++ b/src/methods/adaboost/adaboost_test.pl
@@ -0,0 +1,321 @@
+:- use_module(library(plunit)).
+
+:- use_module(adaboost).
+
+:- use_module(library(csv)).
+:- use_module('../../helper_files/helper.pl').
+
+
+reset_Model_No_Train(Learner) :-
+        initModelNoTraining(0.0001, Learner).
+
+reset_Model_With_Train(Learner) :-
+        initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], 2, Learner, 50, 0.0001).
+
+
+%%
+%% TESTING predicate initModelWithTraining/7
+%%
+:- begin_tests(initModelWithTraining).
+
+%% Failure Tests
+
+test(initModelWithTraining_WrongInputTypes, fail) :-
+        initModelWithTraining(wrong, 3, [0.2,0.2,0.2,0.2], 2, perceptron, 50, 0.0001).
+
+test(initModelWithTraining_WrongTol, fail) :-
+        initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], -2, perceptron, 50, 0.0001).
+
+test(initModelWithTraining_WrongLearner, [error(domain_error('perceptron or decision_stump' ,wrongLearner), _)]) :-
+        initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], 2, wrongLearner, 50, 0.0001).
+
+test(initModelWithTraining_WrongIterations, fail) :-
+        initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], 2, perceptron, -50, 0.0001).
+
+test(initModelWithTraining_WrongTol, fail) :-
+        initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], 2, perceptron, 50, -10.0).
+
+test(initModelWithTraining_MissmatchingLabels) :-
+        initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0, 1 ,1], 2, perceptron, 50, 0.0001).
+
+
+%% Successful Tests
+
+test(initModelWithTraining_Perceptron) :-
+        initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], 2, perceptron, 50, 0.0001).
+
+test(initModelWithTraining_DecisionStump) :-
+        initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3], 3, [0, 0, 1], 2, decision_stump, 50, 0.0001).
+
+:- end_tests(initModelWithTraining).
+
+
+%%
+%% TESTING predicate initModelNoTraining/2
+%%
+:- begin_tests(initModelNoTrain).
+
+%% Failure Tests
+
+test(initModelNoTraining_WrongInputTypes, [error(type_error(number ,wrong), _)]) :-
+        initModelNoTraining(wrong, 1).
+
+test(initModelNoTraining_WrongLearner, [error(domain_error('perceptron or decision_stump' ,wrongLearner), _)]) :-
+        initModelNoTraining(0.0001, wrongLearner).
+
+test(initModelNoTraining_WrongTol, [error(domain_error('>0' ,-1.0), _)]) :-
+        initModelNoTraining(-1.0, perceptron).
+
+%% Successful Tests
+
+test(initModelNoTraining_Perceptron) :-
+        initModelNoTraining(0.001, perceptron).
+
+test(initModelNoTraining_DecisionStump) :-
+        initModelNoTraining(0.000014, decision_stump).
+
+:- end_tests(initModelNoTrain).
+
+
+%%
+%% TESTING predicate classify/8
+%%
+:- begin_tests(classify).
+
+%% Failure Tests
+test(classify_on_untrained_model, [error(_,system_error('The model is not trained!'))]) :-
+        reset_Model_No_Train(perceptron),
+        classify([3, 2, 0, 5, 1, 4, 0, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, _, _, _).
+
+test(classify_with_bad_data_input_perceptron, [error(_,system_error('The given data matrix has incorrect dimensions compared to the training data!'))]) :-
+        reset_Model_With_Train(perceptron),
+        classify([3, 2, 0, 5, 1, 4, 0, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 2, _, _, _).
+
+%% should cause an exception but doesn't TODO:
+test(classify_with_bad_data_input_decision_stump) :-
+        reset_Model_With_Train(decision_stump),
+        classify([3, 2, 0, 5, 1, 4, 0, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 5, _, _, _).
+        
+
+%% Successful Tests
+test(classify_perceptron) :-
+        reset_Model_No_Train(perceptron),
+        open('/home/afkjakhes/eclipse-workspace/prolog-mlpack-libary/src/data_csv/iris2.csv', read, File),
+        take_csv_row(File, skipFirstRow,10, Records),
+        train(Records, 4, [0,1,0,1,1,0,1,1,1,0], 2, perceptron, 50, 0.0001, _),
+        classify([3, 2, 0, 5, 1, 4, 0, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4,
+                 [1.0,1.0,1.0,1.0,0.0],
+                 [[0.0,1.0],[0.0,1.0],[0.0,1.0],[0.0,1.0],[1.0,0.0]], 2).
+
+test(classify_decision_stump) :-
+        reset_Model_No_Train(decision_stump),
+        open('/home/afkjakhes/eclipse-workspace/prolog-mlpack-libary/src/data_csv/iris2.csv', read, File),
+        take_csv_row(File, skipFirstRow,10, Records),
+        train(Records, 4, [0,1,0,1,1,0,1,1,1,0], 2, decision_stump, 50, 0.0001, _),
+        classify([3, 2, 0, 5, 1, 4, 0, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4,
+                 [1.0,1.0,1.0,1.0,1.0],
+                 [[0.0,1.0],[0.0,1.0],[0.0,1.0],[0.0,1.0],[0.0,1.0]], 2).
+
+:- end_tests(classify).
+
+
+%%
+%% TESTING predicate numClasses/1
+%%
+:- begin_tests(numClasses).
+
+test(numClasses_Perceptron_NoTrain, [true(Amount =:= 0)]) :-
+        reset_Model_No_Train(perceptron),
+        numClasses(Amount).
+
+test(numClasses_Decision_Stump_NoTrain, [true(Amount =:= 0)]) :-
+        reset_Model_No_Train(decision_stump),
+        numClasses(Amount).
+
+test(numClasses_Perceptron_WithTrain, [true(Amount =:= 2)]) :-
+        reset_Model_With_Train(perceptron),
+        numClasses(Amount).
+
+test(numClasses_Decision_Stump_WithTrain, [true(Amount =:= 2)]) :-
+        reset_Model_With_Train(decision_stump),
+        numClasses(Amount).
+
+test(numClasses_Custom_NumClasses, [true(Amount =:= 3)]) :-
+        initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], 3, perceptron, 50, 0.0001),
+        numClasses(Amount).
+        
+
+test(numClasses_afterTrain_Perceptron, [true(Amount =:= 2)]) :-
+        reset_Model_No_Train(perceptron),
+        open('/home/afkjakhes/eclipse-workspace/prolog-mlpack-libary/src/data_csv/iris2.csv', read, File),
+        take_csv_row(File, skipFirstRow,10, Records),
+        train(Records, 4, [0,1,0,1,1,0,1,1,1,0], 2, perceptron, 50, 0.0001, _),
+        numClasses(Amount).
+        
+:- end_tests(numClasses).
+
+
+%%
+%% TESTING predicate getTolerance/1
+%%
+:- begin_tests(getTolerance).
+
+test(getTolerance_Perceptron_NoTrain, [true(Amount =:= 0.0001)]) :-
+        reset_Model_No_Train(perceptron),
+        getTolerance(Amount).
+
+test(getTolerance_Decision_Stump_NoTrain, [true(Amount =:= 0.0001)]) :-
+        reset_Model_No_Train(decision_stump),
+        getTolerance(Amount).
+
+test(getTolerance_Perceptron_WithTrain, [true(Amount =:= 0.0001)]) :-
+        reset_Model_With_Train(perceptron),
+        getTolerance(Amount).
+
+test(getTolerance_Decision_Stump_WithTrain, [true(Amount =:= 0.0001)]) :-
+        reset_Model_With_Train(decision_stump),
+        getTolerance(Amount).
+
+test(getTolerance_Custom_Tolerance, [true(Amount =:= 0.0009)]) :-
+        initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], 3, perceptron, 50, 0.0009),
+        getTolerance(Amount).
+
+test(getTolerance_afterTrain, [true(Amount =:= 0.0005)]) :-
+        reset_Model_No_Train(perceptron),
+        open('/home/afkjakhes/eclipse-workspace/prolog-mlpack-libary/src/data_csv/iris2.csv', read, File),
+        take_csv_row(File, skipFirstRow,10, Records),
+        train(Records, 4, [0,1,0,1,1,0,1,1,1,0], 2, perceptron, 50, 0.0005, _),
+        getTolerance(Amount).
+        
+:- end_tests(getTolerance).
+
+
+%%
+%% TESTING predicate modifyTolerance/1
+%%
+%% modifyTolerance(+Amount) overwrites the tolerance of the global AdaBoost
+%% model; every successful test below checks the new value via getTolerance/1.
+:- begin_tests(modifyTolerance).
+
+%% Failure Tests
+%% A non-positive tolerance must be rejected with a domain_error('>0', ...).
+test(modifyTolerance_With_Negative_Input, [error(domain_error('>0' , -0.02), _)]) :-
+        reset_Model_No_Train(perceptron),
+        modifyTolerance(-0.02).
+
+
+%% Successful Tests
+%% Setting the tolerance must work for both weak-learner types
+%% (perceptron and decision_stump), whether or not the model was trained.
+test(modifyTolerance_Perceptron_NoTrain, [true(Amount =:= 0.02)]) :-
+        reset_Model_No_Train(perceptron),
+        modifyTolerance(0.02),
+        getTolerance(Amount).
+
+test(modifyTolerance_Decision_Stump_NoTrain, [true(Amount =:= 0.02)]) :-
+        reset_Model_No_Train(decision_stump),
+        modifyTolerance(0.02),
+        getTolerance(Amount).
+
+test(modifyTolerance_Perceptron_WithTrain, [true(Amount =:= 0.02)]) :-
+        reset_Model_With_Train(perceptron),
+        modifyTolerance(0.02),
+        getTolerance(Amount).
+
+test(modifyTolerance_Decision_Stump_WithTrain, [true(Amount =:= 0.02)]) :-
+        reset_Model_With_Train(decision_stump),
+        modifyTolerance(0.02),
+        getTolerance(Amount).
+
+%% The new tolerance must also replace one that was set explicitly at init.
+test(modifyTolerance_Custom_Tolerance, [true(Amount =:= 0.02)]) :-
+        initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], 3, perceptron, 50, 0.0001),
+        modifyTolerance(0.02),
+        getTolerance(Amount).
+
+%% NOTE(review): the CSV path is machine-specific (/home/afkjakhes/...),
+%% so this test only passes on the original author's machine.
+test(modifyTolerance_afterTrain, [true(Amount =:= 0.02)]) :-
+        reset_Model_No_Train(perceptron),
+        open('/home/afkjakhes/eclipse-workspace/prolog-mlpack-libary/src/data_csv/iris2.csv', read, File),
+        take_csv_row(File, skipFirstRow,10, Records),
+        train(Records, 4, [0,1,0,1,1,0,1,1,1,0], 2, perceptron, 50, 0.0001, _),
+        modifyTolerance(0.02),
+        getTolerance(Amount).
+        
+:- end_tests(modifyTolerance).
+
+
+%%
+%% TESTING predicate train/8
+%%
+%% train(+DataList, +DataRowNum, +LabelsList, +NumClasses, +Learner,
+%%       +Iterations, +Tolerance, -Error) trains the global AdaBoost model
+%% and unifies Error with the upper bound on the training error reported by
+%% mlpack.  Learner must be perceptron or decision_stump.
+:- begin_tests(train).
+
+%% Failure Tests
+test(train_With_Bad_NumClass_Input, fail) :-
+        reset_Model_No_Train(perceptron),
+        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], -2, perceptron, 50, -0.0001, _).
+
+test(train_With_Bad_Learner_Input, [error(domain_error('perceptron or decision_stump' ,wrongLearner), _)]) :-
+        reset_Model_No_Train(perceptron),
+        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, wrongLearner, 50, 0.0001, _).
+
+test(train_With_Bad_Iterations_Input, fail) :-
+        reset_Model_No_Train(perceptron),
+        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, perceptron, -50, 0.0001, _).
+
+test(train_With_Bad_Tol_Input, fail) :-
+        reset_Model_No_Train(perceptron),
+        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, perceptron, 50, -0.0001, _).
+
+test(train_With_Bad_Labels_Too_Many_Classes, [error(_,system_error('The values of the Label have to start at 0 and be >= 0 and < the given numClass!'))]) :-
+        reset_Model_No_Train(perceptron),
+        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,2], 2, perceptron, 50, 0.0001, _).
+
+test(train_With_Bad_Labels_Negative, [error(_,system_error('The values of the Label have to start at 0 and be >= 0 and < the given numClass!'))]) :-
+        reset_Model_No_Train(perceptron),
+        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,-1,0,-1], 2, perceptron, 50, 0.0001, _).
+
+%% TODO: should raise an exception for negative labels, but currently does not.
+%% Renamed from the duplicate id train_With_Bad_Labels_Negative (also used
+%% above) so plunit reports the two tests separately.
+test(train_With_Bad_Labels_Negative_Decision_Stump) :-
+        reset_Model_No_Train(decision_stump),
+        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,-1,0,-1], 2, decision_stump, 50, 0.0001, _).
+
+%% Passing more labels than data points seems to be allowed.
+test(train_With_Too_Many_Labels) :-
+        reset_Model_No_Train(perceptron),
+        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,0,0,1], 2, perceptron, 50, 0.0001, _).
+
+test(train_With_Too_Little_Labels, [error(_,system_error('The given Labels Vector is too short!'))]) :-
+        reset_Model_No_Train(decision_stump),
+        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], 2, decision_stump, 50, 0.0001, _).
+
+test(train_With_Negative_RowNumber, fail) :-
+        reset_Model_No_Train(decision_stump),
+        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], -3, [0,0,0,0], 2, decision_stump, 50, 0.0001, _).
+
+
+%% Successful Tests
+
+test(train_With_Direct_Input_Perceptron, [true(Error =:= 1)]) :-
+        reset_Model_No_Train(perceptron),
+        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, perceptron, 50, 0.0001, Error).
+
+%% NOTE(review): the CSV path below is machine-specific (/home/afkjakhes/...),
+%% so the CSV-based tests only pass on the original author's machine.
+test(train_With_Data_From_CSV_Perceptron, [true(Error =:= 0.9797958971132711)]) :-
+        reset_Model_No_Train(perceptron),
+        open('/home/afkjakhes/eclipse-workspace/prolog-mlpack-libary/src/data_csv/iris2.csv', read, File),
+        take_csv_row(File, skipFirstRow,10, Records),
+        train(Records, 4, [0,1,0,1,1,0,1,1,1,0], 2, perceptron, 50, 0.0001, Error).
+
+test(train_With_Direct_Input_Decision_Stump, [true(Error =:= 1)]) :-
+        reset_Model_No_Train(decision_stump),
+        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, decision_stump, 50, 0.0001, Error).
+
+test(train_With_Data_From_CSV_Decision_Stump, [true(Error =:= 0.9797958971132711)]) :-
+        reset_Model_No_Train(decision_stump),
+        open('/home/afkjakhes/eclipse-workspace/prolog-mlpack-libary/src/data_csv/iris2.csv', read, File),
+        take_csv_row(File, skipFirstRow,10, Records),
+        train(Records, 4, [0,1,0,1,1,0,1,1,1,0], 2, decision_stump, 50, 0.0001, Error).
+
+%% Re-training an already-trained model must also succeed.
+test(train_After_InitTrain_Perceptron, [true(Error =:= 1)]) :-
+        reset_Model_With_Train(perceptron),
+        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 3, perceptron, 100, 0.01, Error).
+
+test(train_After_InitTrain_Decision_Stump, [true(Error =:= 1)]) :-
+        reset_Model_With_Train(decision_stump),
+        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 3, decision_stump, 100, 0.01, Error).
+
+:- end_tests(train).
+
-- 
GitLab