From 9debcb47ba62f8ffd1312e69b21e15f7986dad6a Mon Sep 17 00:00:00 2001
From: Jakhes <dean.schmitz@schmitzbauer.de>
Date: Sat, 12 Nov 2022 00:17:49 +0100
Subject: [PATCH] Finishing logistic_regression tests

---
 .../logistic_regression.cpp                   | 122 +++++----
 .../logistic_regression.pl                    |  52 ++--
 .../logistic_regression_test.pl               | 258 ++++++++++++++++--
 3 files changed, 340 insertions(+), 92 deletions(-)

diff --git a/src/methods/logistic_regression/logistic_regression.cpp b/src/methods/logistic_regression/logistic_regression.cpp
index df09022..75c3567 100644
--- a/src/methods/logistic_regression/logistic_regression.cpp
+++ b/src/methods/logistic_regression/logistic_regression.cpp
@@ -18,6 +18,8 @@ using namespace mlpack::regression;
 // Global Variable of the LogisticRegression object so it can be accessed from all functions
 LogisticRegression logisticRegression;
 
+int trainedDimension = 0;	// dimensionality (rows) of the data the model was last trained on; used to validate later inputs
+
 
 // input: 	const MatType & 				predictors,
 //			const arma::Row< size_t > & 	responses,
@@ -33,6 +35,13 @@ void initModelNoOptimizer(float *predictorsMatArr, SP_integer predictorsMatSize,
 {
 	// convert the Prolog array to arma::mat
 	mat predictors = convertArrayToMat(predictorsMatArr, predictorsMatSize, predictorsMatRowNum);
+	// check if labels fit the data
+    if (predictors.n_cols != responsesArrSize)
+    {
+        raisePrologSystemExeption("The number of data points does not match the number of labels!");
+        return;
+    }
+
 	// convert the Prolog array to arma::rowvec
 	arma::Row<size_t> responsesVector = convertArrayToVec(responsesArr, responsesArrSize);
 
@@ -46,6 +55,7 @@ void initModelNoOptimizer(float *predictorsMatArr, SP_integer predictorsMatSize,
 		raisePrologSystemExeption(e.what());
 		return;
 	}
+	trainedDimension = predictors.n_rows;
 }
 
 
@@ -60,25 +70,44 @@ void initModelNoOptimizer(float *predictorsMatArr, SP_integer predictorsMatSize,
 void initModelWithOptimizer(float *predictorsMatArr, SP_integer predictorsMatSize, SP_integer predictorsMatRowNum, 
 							float *responsesArr, SP_integer responsesArrSize, 
 							char const *optimizer, 
-							double lambda)
+							double lambda, SP_integer maxIterations, double tolerance, double stepSize, SP_integer batchSize)
 {
 	// convert the Prolog array to arma::mat
 	mat predictors = convertArrayToMat(predictorsMatArr, predictorsMatSize, predictorsMatRowNum);
+	if (predictors.n_cols != responsesArrSize)
+    {
+        raisePrologSystemExeption("The number of data points does not match the number of labels!");
+        return;
+    }
+
 	// convert the Prolog array to arma::rowvec
 	arma::Row<size_t> responsesVector = convertArrayToVec(responsesArr, responsesArrSize);
 
-
 	try
 	{
 		logisticRegression = LogisticRegression<>(predictors.n_cols, lambda);
 	
 		if (strcmp(optimizer, "lbfgs") == 0)
 		{
-			logisticRegression.Train(predictors,responsesVector);
+			ens::L_BFGS lbfgsOpt;
+      		lbfgsOpt.MaxIterations() = maxIterations;
+      		lbfgsOpt.MinGradientNorm() = tolerance;
+      		Log::Info << "Training model with L-BFGS optimizer." << endl;
+
+      		// This will train the model.
+      		logisticRegression.Train(predictors, responsesVector, lbfgsOpt);
 		}
 		else if (strcmp(optimizer, "psgd") == 0)
 		{
-			logisticRegression.Train(predictors,responsesVector, ens::ParallelSGD(100,64));
+			ens::SGD<> sgdOpt;
+      		sgdOpt.MaxIterations() = maxIterations;
+      		sgdOpt.Tolerance() = tolerance;
+      		sgdOpt.StepSize() = stepSize;
+      		sgdOpt.BatchSize() = batchSize;
+      		Log::Info << "Training model with SGD optimizer." << endl;
+
+      		// This will train the model.
+      		logisticRegression.Train(predictors,responsesVector, sgdOpt);
 		}
 		else
 		{
@@ -90,6 +119,7 @@ void initModelWithOptimizer(float *predictorsMatArr, SP_integer predictorsMatSiz
 		raisePrologSystemExeption(e.what());
 		return;
 	}
+	trainedDimension = predictors.n_rows;
 }
 
 
@@ -103,6 +133,11 @@ void initModelWithOptimizer(float *predictorsMatArr, SP_integer predictorsMatSiz
 SP_integer classifyPoint(float *pointArr, SP_integer pointArrSize, 
 							double decisionBoundary)
 {
+	if (pointArrSize != trainedDimension)
+	{
+		raisePrologSystemExeption("The given Datapoint has a different Dimension than the trained Model!");
+        return 0;
+	}
 	// convert the Prolog array to arma::rowvec
 	rowvec pointVector = convertArrayToRowvec(pointArr, pointArrSize);
 	try
@@ -130,9 +165,15 @@ void classifyMatrix(float *dataMatArr, SP_integer dataMatSize, SP_integer dataMa
 					float **probsMatArr, SP_integer *probsMatColNum, SP_integer *probsMatRowNum, 
 					double decisionBoundary)
 {
+	
 	// convert the Prolog arrays to arma::mat
 	mat data = convertArrayToMat(dataMatArr, dataMatSize, dataMatRowNum);
-
+	if (data.n_rows != trainedDimension)
+	{
+		raisePrologSystemExeption("The given Datapoints have a different Dimension than the trained Model!");
+        return;
+	}
+	
 	// get the ReturnVector
 	Row<size_t> labelsReturnVector;
 	// create the ReturnMat
@@ -172,6 +213,17 @@ double computeAccuracy(float *predictorsMatArr, SP_integer predictorsMatSize, SP
 {
 	// convert the Prolog array to arma::mat
 	mat predictors = convertArrayToMat(predictorsMatArr, predictorsMatSize, predictorsMatRowNum);
+	if (predictors.n_cols != responsesArrSize)
+    {
+        raisePrologSystemExeption("The number of data points does not match the number of labels!");
+        return 0.0;
+    }
+	if (predictors.n_rows != trainedDimension)
+	{
+		raisePrologSystemExeption("The given Datapoints have a different Dimension than the trained Model!");
+        return 0.0;
+	}
+
 	// convert the Prolog array to arma::rowvec
 	arma::Row<size_t> responsesVector = convertArrayToVec(responsesArr, responsesArrSize);
 
@@ -199,57 +251,35 @@ double computeError(float *predictorsMatArr, SP_integer predictorsMatSize, SP_in
 {
 	// convert the Prolog arrays to arma::mat
 	mat predictors = convertArrayToMat(predictorsMatArr, predictorsMatSize, predictorsMatRowNum);
-	// convert the Prolog array to arma::rowvec
-	arma::Row<size_t> responsesVector = convertArrayToVec(responsesArr, responsesArrSize);
-
-	try
+	if (predictors.n_cols != responsesArrSize)
+    {
+        raisePrologSystemExeption("The number of data points does not match the number of labels!");
+        return 0.0;
+    }
+	if (predictors.n_rows != trainedDimension)
 	{
-		return logisticRegression.ComputeError(predictors, responsesVector);
+		raisePrologSystemExeption("The given Datapoints have a different Dimension than the trained Model!");
+        return 0.0;
 	}
-	catch(const std::exception& e)
-	{
-		raisePrologSystemExeption(e.what());
-		return 0.0;
-	}
-}
-
 
-// input: 	const MatType & 				predictors,
-//			const arma::Row< size_t > & 	responses,
-//			OptimizerType & 				optimizer,
-//			CallbackTypes &&... 			callbacks
-// output: 
-// description: 
-//			Train the logistic_regression model on the given input data.
-//
-void train(float *predictorsMatArr, SP_integer predictorsMatSize, SP_integer predictorsMatRowNum, 
-			float *responsesArr, SP_integer responsesArrSize, 
-			char const *optimizer)
-{
-	// convert the Prolog array to arma::mat
-	mat predictors = convertArrayToMat(predictorsMatArr, predictorsMatSize, predictorsMatRowNum);
 	// convert the Prolog array to arma::rowvec
 	arma::Row<size_t> responsesVector = convertArrayToVec(responsesArr, responsesArrSize);
 
-
+	double error = 0.0;
 	try
 	{
-		if (strcmp(optimizer, "lbfgs") == 0)
-		{
-			logisticRegression.Train(predictors,responsesVector);
-		}
-		else if (strcmp(optimizer, "psgd") == 0)
-		{
-			logisticRegression.Train(predictors,responsesVector, ens::ParallelSGD(100,64));
-		}
-		else
-		{
-			raisePrologDomainExeption(optimizer, 4, "The given Optimizer is unkown!", "initModelWithOptimizer");
-		}
+		error = logisticRegression.ComputeError(predictors, responsesVector);
 	}
 	catch(const std::exception& e)
 	{
 		raisePrologSystemExeption(e.what());
-		return;
+		return 0.0;
 	}
-}
+	if (error != error)	// NaN is the only value that compares unequal to itself
+	{
+		raisePrologSystemExeption("Error contains nan Value!");
+		return 0.0;
+	}
+	
+	return error;
+}
\ No newline at end of file
diff --git a/src/methods/logistic_regression/logistic_regression.pl b/src/methods/logistic_regression/logistic_regression.pl
index ecdc87b..d84ea2b 100644
--- a/src/methods/logistic_regression/logistic_regression.pl
+++ b/src/methods/logistic_regression/logistic_regression.pl
@@ -1,11 +1,10 @@
 
 :- module(logistic_regression, [        logistic_regression_initModelNoOptimizer/4,
-                                        logistic_regression_initModelWithOptimizer/5,
+                                        logistic_regression_initModelWithOptimizer/9,
                                         logistic_regression_classifyPoint/3,
                                         logistic_regression_classifyMatrix/6,
                                         logistic_regression_computeAccuracy/5,
-                                        logistic_regression_computeError/4,
-                                        logistic_regression_train/4]).
+                                        logistic_regression_computeError/4]).
 
 %% requirements of library(struct)
 :- load_files(library(str_decl),
@@ -34,6 +33,7 @@
 %%              Initialize the logistic_regression model without specifing a optimizer.
 %%
 logistic_regression_initModelNoOptimizer(DataList, DataRows, ResponsesList, Lambda) :-
+        Lambda >= 0.0,
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)),
         convert_list_to_float_array(ResponsesList, array(Ysize, Y)),
         initModelNoOptimizerI(X, Xsize, Xrownum, Y, Ysize, Lambda).
@@ -47,22 +47,31 @@ foreign(initModelNoOptimizer, c, initModelNoOptimizerI( +pointer(float_array), +
 %%              mat     data,
 %%              vec     responses,
 %%              string  optimizer       => "lbfgs", "psgd" => lbfgs,
-%%              float32 lambda          => 0.0
+%%              float32 lambda          => 0.0,
+%%              int     maxIterations   => 10000,
+%%              float32 tolerance       => 1e-10,
+%%              float32 stepSize        => 0.01,
+%%              int     batchSize       => 64
 %%
 %% --Output--
 %%
 %% --Description--
 %%              Initialize the logistic_regression model and specify the optimizer.
 %%
-logistic_regression_initModelWithOptimizer(DataList, DataRows, ResponsesList, Responses, Lambda) :-
+logistic_regression_initModelWithOptimizer(DataList, DataRows, ResponsesList, Optimizer, Lambda, MaxIterations, Tolerance, StepSize, BatchSize) :-
+        Lambda >= 0.0,
+        MaxIterations >= 0,
+        Tolerance >= 0.0,
+        StepSize >= 0.0,
+        BatchSize > 0,
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)),
         convert_list_to_float_array(ResponsesList, array(Ysize, Y)),
-        initModelWithOptimizerI(X, Xsize, Xrownum, Y, Ysize, Responses, Lambda).
+        initModelWithOptimizerI(X, Xsize, Xrownum, Y, Ysize, Optimizer, Lambda, MaxIterations, Tolerance, StepSize, BatchSize).
 
 foreign(initModelWithOptimizer, c, initModelWithOptimizerI(     +pointer(float_array), +integer, +integer, 
                                                                 +pointer(float_array), +integer, 
                                                                 +string, 
-                                                                +float32)).
+                                                                +float32, +integer, +float32, +float32, +integer)).
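+
+%% Example call, mirroring the fixture used in logistic_regression_test.pl
+%% (the parameter values are illustrative, not required defaults):
+%%   ?- logistic_regression_initModelWithOptimizer([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3,
+%%              [0,1,0,1], lbfgs, 0.0, 1000, 1.0e-10, 0.01, 64).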
 
 
 %% --Input--
@@ -76,6 +85,8 @@ foreign(initModelWithOptimizer, c, initModelWithOptimizerI(     +pointer(float_a
 %%              Classify the given point.
 %%
 logistic_regression_classifyPoint(PointList, DecisionBoundary, PredicLabel) :-
+        DecisionBoundary >= 0.0,
+        DecisionBoundary =< 1.0,
         convert_list_to_float_array(PointList, array(Xsize, X)),
         classifyPointI(X, Xsize, DecisionBoundary, PredicLabel).
 
@@ -96,6 +107,8 @@ foreign(classifyPoint, c, classifyPointI(       +pointer(float_array), +integer,
 %%              Classify the given points, returning the predicted labels for each point.
 %%
 logistic_regression_classifyMatrix(DataList, DataRows, PredictionList, ProbsList, ZCols, DecisionBoundary) :-
+        DecisionBoundary >= 0.0,
+        DecisionBoundary =< 1.0,
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrows, X)),
         classifyMatrixI(X, Xsize, Xrows, Y, Ysize, Z, ZCols, ZRows, DecisionBoundary),
         convert_float_array_to_list(Y, Ysize, PredictionList),
@@ -119,6 +132,8 @@ foreign(classifyMatrix, c, classifyMatrixI(     +pointer(float_array), +integer,
 %%              Compute the accuracy of the model on the given predictors and responses, using the given decision boundary.
 %%
 logistic_regression_computeAccuracy(DataList, DataRows, ResponsesList, DecisionBoundary, Accuracy) :-
+        DecisionBoundary >= 0.0,
+        DecisionBoundary =< 1.0,
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)),
         convert_list_to_float_array(ResponsesList, array(Ysize, Y)),
         computeAccuracyI(X, Xsize, Xrownum, Y, Ysize, DecisionBoundary, Accuracy).
@@ -149,33 +164,12 @@ foreign(computeError, c, computeErrorI( +pointer(float_array), +integer, +intege
                                         [-float32])).
 
 
-%% --Input--
-%%              mat     data,
-%%              vec     responses,
-%%              string  optimizer       => "lbfgs", "psgd" => lbfgs
-%%
-%% --Output--
-%%
-%% --Description--
-%%              Train the logistic_regression model on the given input data.
-%%
-logistic_regression_train(DataList, DataRows, ResponsesList, Optimizer) :-
-        convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)),
-        convert_list_to_float_array(ResponsesList, array(Ysize, Y)),
-        trainI(X, Xsize, Xrownum, Y, Ysize, Optimizer).
-
-foreign(train, c, trainI(       +pointer(float_array), +integer, +integer, 
-                                +pointer(float_array), +integer, 
-                                +string)).
-
-
 %% Defines the functions that get connected from main.cpp
 foreign_resource(logistic_regression, [         initModelNoOptimizer,
                                                 initModelWithOptimizer,
                                                 classifyPoint,
                                                 classifyMatrix,
                                                 computeAccuracy,
-                                                computeError,
-                                                train]).
+                                                computeError]).
 
 :- load_foreign_resource(logistic_regression).
diff --git a/src/methods/logistic_regression/logistic_regression_test.pl b/src/methods/logistic_regression/logistic_regression_test.pl
index f7d7fac..e1371f8 100644
--- a/src/methods/logistic_regression/logistic_regression_test.pl
+++ b/src/methods/logistic_regression/logistic_regression_test.pl
@@ -6,38 +6,262 @@
 :- use_module(logistic_regression).
 :- use_module('../../helper_files/helper.pl').
 
-reset_Model :-
-        logistic_regression_initModel(1,0,50,0.0001).
+reset_Model_NoOptimizer :-
+        logistic_regression_initModelNoOptimizer([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 0.0).
+
+reset_Model_WithOptimizer :-
+        logistic_regression_initModelWithOptimizer([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], lbfgs, 0.0, 1000, 1.0e-10, 0.01, 64).
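+
+%% Note on the fixtures used throughout this suite: a 12-element data list with
+%% DataRows = 3 is interpreted as 3 features (rows) by 4 points (columns), which
+%% matches the 4 labels in [0,1,0,1] (see the dimension checks in logistic_regression.cpp).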
 
 %%
-%% TESTING predicate predicate/10
+%% TESTING predicate logistic_regression_initModelNoOptimizer/4
 %%
-:- begin_tests(predicate).      
+:- begin_tests(logistic_regression_initModelNoOptimizer).      
 
 %% Failure Tests
                                             
-test(testDescription, [error(domain_error('expectation' , culprit), _)]) :-
-        reset_Model_No_Train(perceptron),
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, culprit, 50, 0.0001, _).
+test(logistic_regression_InitModelNoOptimizer_Negative_Lambda, fail) :-
+        logistic_regression_initModelNoOptimizer([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], -1.0).
+
+
+test(logistic_regression_InitModelNoOptimizer_Too_Few_Labels, [error(_, system_error('The number of data points does not match the number of labels!'))]) :-
+        logistic_regression_initModelNoOptimizer([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], 0.0).
+
+test(logistic_regression_InitModelNoOptimizer_Too_Many_Labels, [error(_, system_error('The number of data points does not match the number of labels!'))]) :-
+        logistic_regression_initModelNoOptimizer([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1,0,1], 0.0).
 
-test(testDescription2, [error(_,system_error('The values of the Label have to start at 0 and be >= 0 and < the given numClass!'))]) :-
-        reset_Model_No_Train(perceptron),
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,2], 2, perceptron, 50, 0.0001, _).
+%% does not cause an error
+test(logistic_regression_InitModelNoOptimizer_Too_Many_Labelclasses) :-
+        logistic_regression_initModelNoOptimizer([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,2,3], 0.0).
         
 
 %% Successful Tests
 
-test(testDescription3, [true(Error =:= 1)]) :-
-        reset_Model_No_Train(perceptron),
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, perceptron, 50, 0.0001, Error).
+test(logistic_regression_InitModelNoOptimizer_Normal_Use) :-
+        logistic_regression_initModelNoOptimizer([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 0.0).
 
-test(testDescription4, [true(Error =:= 0.9797958971132711)]) :-
-        reset_Model_No_Train(perceptron),
+test(logistic_regression_InitModelNoOptimizer_CSV_Input) :-
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        train(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, perceptron, 50, 0.0001, Error).
+        logistic_regression_initModelNoOptimizer(Data, 4, [0,1,0,1,1,0,1,1,1,0], 0.0).
+
+:- end_tests(logistic_regression_initModelNoOptimizer).
+
+
+
+%%
+%% TESTING predicate logistic_regression_initModelWithOptimizer/9
+%%
+:- begin_tests(logistic_regression_initModelWithOptimizer).      
+
+%% Failure Tests
+
+test(logistic_regression_InitModelWithOptimizer_Wrong_Optimizer, [error(domain_error('The given Optimizer is unkown!' , wrongInput), _)]) :-
+        logistic_regression_initModelWithOptimizer([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], wrongInput, 0.0, 1000, 1.0e-10, 0.01, 64).
+                                            
+test(logistic_regression_InitModelWithOptimizer_Negative_Lambda, fail) :-
+        logistic_regression_initModelWithOptimizer([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], lbfgs, -1.0, 1000, 1.0e-10, 0.01, 64).
+
+test(logistic_regression_InitModelWithOptimizer_Negative_MaxIteration, fail) :-
+        logistic_regression_initModelWithOptimizer([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], lbfgs, 0.0, -1000, 1.0e-10, 0.01, 64).
+
+test(logistic_regression_InitModelWithOptimizer_Negative_Tolerance, fail) :-
+        logistic_regression_initModelWithOptimizer([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], lbfgs, 0.0, 1000, -1.0e-10, 0.01, 64).
+
+test(logistic_regression_InitModelWithOptimizer_Negative_StepSize, fail) :-
+        logistic_regression_initModelWithOptimizer([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], lbfgs, 0.0, 1000, 1.0e-10, -0.01, 64).
+
+test(logistic_regression_InitModelWithOptimizer_Negative_BatchSize, fail) :-
+        logistic_regression_initModelWithOptimizer([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], lbfgs, 0.0, 1000, 1.0e-10, 0.01, -64).
+
+
+test(logistic_regression_InitModelWithOptimizer_Too_Few_Labels, [error(_, system_error('The number of data points does not match the number of labels!'))]) :-
+        logistic_regression_initModelWithOptimizer([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], lbfgs, 0.0, 1000, 1.0e-10, 0.01, 64).
+
+test(logistic_regression_InitModelWithOptimizer_Too_Many_Labels, [error(_, system_error('The number of data points does not match the number of labels!'))]) :-
+        logistic_regression_initModelWithOptimizer([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1,0,1], lbfgs, 0.0, 1000, 1.0e-10, 0.01, 64).
+
+%% does not cause an error
+test(logistic_regression_InitModelWithOptimizer_Too_Many_Labelclasses) :-
+        logistic_regression_initModelWithOptimizer([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,2,3], lbfgs, 0.0, 1000, 1.0e-10, 0.01, 64).
+        
+
+%% Successful Tests
+
+test(logistic_regression_InitModelWithOptimizer_Normal_Use_LBFGS) :-
+        logistic_regression_initModelWithOptimizer([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], lbfgs, 0.0, 1000, 1.0e-10, 0.01, 64).
+
+test(logistic_regression_InitModelWithOptimizer_CSV_Input_LBFGS) :-
+        open('src/data_csv/iris2.csv', read, File),
+        take_csv_row(File, skipFirstRow,10, Data),
+        logistic_regression_initModelWithOptimizer(Data, 4, [0,1,0,1,1,0,1,1,1,0], lbfgs, 0.0, 1000, 1.0e-10, 0.01, 64).
+
+test(logistic_regression_InitModelWithOptimizer_Normal_Use_PSGD) :-
+        logistic_regression_initModelWithOptimizer([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], psgd, 0.0, 1000, 1.0e-10, 0.01, 64).
+
+test(logistic_regression_InitModelWithOptimizer_CSV_Input_PSGD) :-
+        open('src/data_csv/iris2.csv', read, File),
+        take_csv_row(File, skipFirstRow,10, Data),
+        logistic_regression_initModelWithOptimizer(Data, 4, [0,1,0,1,1,0,1,1,1,0], psgd, 0.0, 1000, 1.0e-10, 0.01, 64).
+
+:- end_tests(logistic_regression_initModelWithOptimizer).
+
+
+
+%%
+%% TESTING predicate logistic_regression_classifyPoint/3
+%%
+:- begin_tests(logistic_regression_classifyPoint).      
+
+%% Failure Tests
+                                            
+test(logistic_regression_ClassifyPoint_Different_Dims, [error(_, system_error('The given Datapoint has a different Dimension than the trained Model!'))]) :-
+        reset_Model_WithOptimizer,
+        logistic_regression_classifyPoint([5.1,3.5,1.4,4.9], 0.5, _).
+
+test(logistic_regression_ClassifyPoint_Bad_Boundary_Input, fail) :-
+        reset_Model_WithOptimizer,
+        logistic_regression_classifyPoint([5.1,3.5,1.4], -0.5, _),
+        logistic_regression_classifyPoint([5.1,3.5,1.4], 1.5, _).
+        
+
+%% Successful Tests
+
+test(logistic_regression_ClassifyPoint_Normal_Use) :-
+        reset_Model_WithOptimizer,
+        logistic_regression_classifyPoint([5.1,3.5,1.4], 0.5, PredictLabel),
+        print('\nPredicted Label: '),
+        print(PredictLabel).
+
+test(logistic_regression_ClassifyPoint_NoOptimizer) :-
+        reset_Model_NoOptimizer,
+        logistic_regression_classifyPoint([5.1,3.5,1.4], 0.5, PredictLabel),
+        print('\nPredicted Label: '),
+        print(PredictLabel).
+
+:- end_tests(logistic_regression_classifyPoint).
+
+
+
+%%
+%% TESTING predicate logistic_regression_classifyMatrix/6
+%%
+:- begin_tests(logistic_regression_classifyMatrix).      
+
+%% Failure Tests
+                                            
+test(logistic_regression_ClassifyMatrix_Different_Dims, [error(_, system_error('The given Datapoints have a different Dimension than the trained Model!'))]) :-
+        reset_Model_WithOptimizer,
+        logistic_regression_classifyMatrix([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, _, _, _, 0.5).
+
+test(logistic_regression_ClassifyMatrix_Bad_Boundary_Input, fail) :-
+        reset_Model_WithOptimizer,
+        logistic_regression_classifyMatrix([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, _, _, _, -0.5),
+        logistic_regression_classifyMatrix([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, _, _, _, 1.5).
+        
+
+%% Successful Tests
+
+test(logistic_regression_ClassifyMatrix_Normal_Use) :-
+        reset_Model_WithOptimizer,
+        logistic_regression_classifyMatrix([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, PredictLabels, ProbsList, _, 0.5),
+        print('\nPredicted Labels: '),
+        print(PredictLabels),
+        print('\nProbabilities: '),
+        print(ProbsList).
+
+test(logistic_regression_ClassifyMatrix_NoOptimizer) :-
+        reset_Model_NoOptimizer,
+        logistic_regression_classifyMatrix([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, PredictLabels, ProbsList, _, 0.5),
+        print('\nPredicted Labels: '),
+        print(PredictLabels),
+        print('\nProbabilities: '),
+        print(ProbsList).
+
+:- end_tests(logistic_regression_classifyMatrix).
+
+
+
+%%
+%% TESTING predicate logistic_regression_computeAccuracy/5
+%%
+:- begin_tests(logistic_regression_computeAccuracy).      
+
+%% Failure Tests
+
+test(logistic_regression_ComputeAccuracy_Different_Dims, [error(_, system_error('The given Datapoints have a different Dimension than the trained Model!'))]) :-
+        reset_Model_WithOptimizer,
+        logistic_regression_computeAccuracy([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, [0,1,0], 0.5, _).
+
+test(logistic_regression_ComputeAccuracy_Bad_Boundary_Input, fail) :-
+        reset_Model_WithOptimizer,
+        logistic_regression_computeAccuracy([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], -0.5, _),
+        logistic_regression_computeAccuracy([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 1.5, _).
+        
+
+test(logistic_regression_ComputeAccuracy_Too_Few_Labels, [error(_, system_error('The number of data points does not match the number of labels!'))]) :-
+        logistic_regression_computeAccuracy([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], 0.5, _).
+
+test(logistic_regression_ComputeAccuracy_Too_Many_Labels, [error(_, system_error('The number of data points does not match the number of labels!'))]) :-
+        logistic_regression_computeAccuracy([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1,0,1], 0.5, _).
+
+%% does not cause an error
+test(logistic_regression_computeAccuracy_Too_Many_Labelclasses) :-
+        logistic_regression_computeAccuracy([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,2,3], 0.5, _).
+
+%% Successful Tests
+
+test(logistic_regression_ComputeAccuracy_Normal_Use) :-
+        reset_Model_WithOptimizer,
+        logistic_regression_computeAccuracy([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 0.5, AccuracyList),
+        print('\nAccuracy: '),
+        print(AccuracyList).
+
+test(logistic_regression_ComputeAccuracy_NoOptimizer) :-
+        reset_Model_NoOptimizer,
+        logistic_regression_computeAccuracy([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 0.5, AccuracyList),
+        print('\nAccuracy: '),
+        print(AccuracyList).
+
+:- end_tests(logistic_regression_computeAccuracy).
+
+
+
+%%
+%% TESTING predicate logistic_regression_computeError/4
+%%
+:- begin_tests(logistic_regression_computeError).      
+
+%% Failure Tests
+
+test(logistic_regression_computeError_Different_Dims, [error(_, system_error('The given Datapoints have a different Dimension than the trained Model!'))]) :-
+        reset_Model_WithOptimizer,
+        logistic_regression_computeError([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, [0,1,0], _).
+        
+
+test(logistic_regression_computeError_Too_Few_Labels, [error(_, system_error('The number of data points does not match the number of labels!'))]) :-
+        logistic_regression_computeError([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], _).
+
+test(logistic_regression_computeError_Too_Many_Labels, [error(_, system_error('The number of data points does not match the number of labels!'))]) :-
+        logistic_regression_computeError([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1,0,1], _).
+
+test(logistic_regression_computeError_Too_Many_Labelclasses, [error(_, system_error('Error contains nan Value!'))]) :-
+        logistic_regression_computeError([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,2,3], _).
+
+%% Successful Tests
+
+test(logistic_regression_computeError_Normal_Use) :-
+        reset_Model_WithOptimizer,
+        logistic_regression_computeError([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], ErrorList),
+        print('\nError: '),
+        print(ErrorList).
+
+test(logistic_regression_computeError_NoOptimizer) :-
+        reset_Model_NoOptimizer,
+        logistic_regression_computeError([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], ErrorList),
+        print('\nError: '),
+        print(ErrorList).
 
-:- end_tests(predicate).
+:- end_tests(logistic_regression_computeError).
 
 run_logistic_regression_tests :-
         run_tests.
-- 
GitLab