Skip to content
Snippets Groups Projects
Commit b613b60c authored by Jakhes's avatar Jakhes
Browse files

Fixing some Errors in the tests.

parent a9f6c13f
No related branches found
No related tags found
No related merge requests found
Showing
with 846 additions and 292 deletions
......@@ -7,6 +7,9 @@ using namespace arma;
using namespace mlpack;
using namespace std;
// conversion from arma datatypes to arrays usable by Prolog
float *convertToArray(colvec vec);
float *convertToArray(rowvec vec);
......@@ -24,12 +27,17 @@ float *convertToArray(Row<size_t> vec);
float *convertToArray(vector<vec> vec);
// conversion from arrays to arma datatypes
rowvec convertArrayToRowvec(float *arr, int vecSize);
Row<size_t> convertArrayToVec(float *arr, int vecSize);
mat convertArrayToMat(float *arr, int vecSize, int rowCount);
// insert the Vector and Matrix information into the Prolog variables
void returnMatrixInformation(mat matrix, float **mat, SP_integer *matColNum, SP_integer *matRowNum);
void returnMatrixInformation(Mat< size_t > matrix, float **mat, SP_integer *matColNum, SP_integer *matRowNum);
......
......@@ -269,12 +269,12 @@ test(train_With_Bad_Labels_Too_Many_Classes, [error(_,system_error('The values o
reset_Model_No_Train(perceptron),
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,2], 2, perceptron, 50, 0.0001, _).
test(train_With_Bad_Labels_Negative, [error(_,system_error('The values of the Label have to start at 0 and be >= 0 and < the given numClass!'))]) :-
test(train_With_Bad_Labels_Negative_Perceptron, [error(_,system_error('The values of the Label have to start at 0 and be >= 0 and < the given numClass!'))]) :-
reset_Model_No_Train(perceptron),
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,-1,0,-1], 2, perceptron, 50, 0.0001, _).
%% should cause an exception but doesn't TODO:
test(train_With_Bad_Labels_Negative) :-
test(train_With_Bad_Labels_Negative_Decision_Stump) :-
reset_Model_No_Train(decision_stump),
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,-1,0,-1], 2, decision_stump, 50, 0.0001, _).
......
......@@ -17,10 +17,10 @@ using namespace std;
using namespace mlpack::neighbor;
// Global Variable of the DrusillaSelect object so it can be accessed from all functions
DrusillaSelect<mat> drusillaSelect = DrusillaSelect(0,0);
DrusillaSelect<mat> drusillaSelect = DrusillaSelect(1,1);
// Global Variable of the QDAFN object so it can be accessed from all functions
QDAFN<mat> qdafn = QDAFN(0,0);
QDAFN<mat> qdafn = QDAFN(1,1);
// TODO:
// input: const size_t l,
......
:- module(approx_kfn_tests, [run_approx_kfn_tests/0]).
:- use_module(library(plunit)).
:- use_module(approx_kfn).
......@@ -6,51 +9,36 @@
%% Helper: (re)initialise the model before each test.
%% NOTE(review): argument meanings (1,0,50,0.0001) are defined by
%% initModel/4 in the module under test — confirm against its docs.
reset_Model :-
initModel(1,0,50,0.0001).
:- begin_tests(lists).
%% alpha tests
%% After a fresh reset, alpha/1 must unify with 0.
test(alpha_std_init) :-
reset_Model,
alpha(0).
%% alpha(1) must fail right after reset (the initial alpha is 0,
%% as checked by alpha_std_init).
test(alpha_wrong_input, fail) :-
reset_Model,
alpha(1).
%% Train on a small 3-row matrix, then read alpha back.
%% NOTE(review): the expected value 9223372036854775808 is exactly 2^63,
%% which looks like an integer-overflow/sentinel value leaking through the
%% foreign interface rather than a meaningful alpha — verify intent.
test(alpha_after_train, A =:= 9223372036854775808) :-
reset_Model,
convert_list_to_float_array([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5],3, array(Xsize, Xrownum, X)),
convert_list_to_float_array([0.2,0.2,0.2,0.2], array(Ysize, Y)),
train(X,Xsize, Xrownum,Y, Ysize),
alpha(A).
%% train tests
%% Happy path: 12 values as 3 rows (4 columns) with 4 responses —
%% dimensions agree, so train/5 must succeed.
test(correct_train) :-
reset_Model,
convert_list_to_float_array([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5],3, array(Xsize, Xrownum, X)),
convert_list_to_float_array([0.2,0.2,0.2,0.2], array(Ysize, Y)),
train(X,Xsize, Xrownum,Y, Ysize).
%% Empty data matrix (declared 3 rows) with 4 responses — must fail.
test(false_train, fail) :-
reset_Model,
convert_list_to_float_array([],3, array(Xsize, Xrownum, X)),
convert_list_to_float_array([0.2,0.2,0.2,0.2], array(Ysize, Y)),
train(X,Xsize, Xrownum,Y, Ysize).
%% Empty data matrix with a declared row count of 0 — must fail.
test(false_train2, fail) :-
reset_Model,
convert_list_to_float_array([],0, array(Xsize, Xrownum, X)),
convert_list_to_float_array([0.2,0.2,0.2,0.2], array(Ysize, Y)),
train(X,Xsize, Xrownum,Y, Ysize).
%% Non-empty data but a declared row count of 0 — must fail.
test(false_train3, fail) :-
reset_Model,
convert_list_to_float_array([1,2],0, array(Xsize, Xrownum, X)),
convert_list_to_float_array([0.2,0.2,0.2,0.2], array(Ysize, Y)),
train(X,Xsize, Xrownum,Y, Ysize).
%% 4 data values declared as 3 rows: the row count does not divide the
%% data length, so training must fail.
%% Renamed from false_train3: plunit test names within one unit should
%% be unique so failures are reported unambiguously, and this unit
%% already defines a different test(false_train3, fail).
test(false_train_bad_row_count, fail) :-
reset_Model,
convert_list_to_float_array([1,2,44,3],3, array(Xsize, Xrownum, X)),
convert_list_to_float_array([0.2,0.2,0.2,0.2], array(Ysize, Y)),
train(X,Xsize, Xrownum,Y, Ysize).
%% 4 values as 2 rows (2 columns) with 4 responses.
%% NOTE(review): unlike the neighbouring false_train* tests this one has
%% no `fail` option, so it asserts train/5 SUCCEEDS despite the name —
%% confirm whether the option was forgotten or success is intended.
test(false_train4) :-
reset_Model,
convert_list_to_float_array([1,2,44,3],2, array(Xsize, Xrownum, X)),
convert_list_to_float_array([0.2,0.2,0.2,0.2], array(Ysize, Y)),
train(X,Xsize, Xrownum,Y, Ysize).
:- end_tests(lists).
\ No newline at end of file
%%
%% TESTING predicate predicate/10
%%
:- begin_tests(predicate).
%% Failure Tests
%% Expects a domain_error when an unknown model atom (culprit) is passed.
%% NOTE(review): reset_Model_No_Train/1 and train/8 belong to the
%% perceptron test suite — this block appears copy-pasted into the
%% approx_kfn test file; verify these predicates are in scope here.
test(testDescription, [error(domain_error('expectation' , culprit), _)]) :-
reset_Model_No_Train(perceptron),
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, culprit, 50, 0.0001, _).
%% Label value 2 with numClass 2 is out of range [0, numClass) —
%% expects the system_error raised by the C++ glue code.
%% NOTE(review): likely copy-pasted from the perceptron suite — verify
%% train/8 is in scope in this test file.
test(testDescription2, [error(_,system_error('The values of the Label have to start at 0 and be >= 0 and < the given numClass!'))]) :-
reset_Model_No_Train(perceptron),
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,2], 2, perceptron, 50, 0.0001, _).
%% Successful Tests
%% All-zero labels on 4 columns: training must succeed with Error =:= 1.
%% NOTE(review): likely copy-pasted from the perceptron suite — verify
%% train/8 is in scope in this test file.
test(testDescription3, [true(Error =:= 1)]) :-
reset_Model_No_Train(perceptron),
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, perceptron, 50, 0.0001, Error).
%% Train on 10 CSV rows (iris2.csv, header skipped) and pin the exact
%% training error.
%% NOTE(review): asserting =:= on a full-precision float
%% (0.9797958971132711) is brittle across platforms/BLAS builds, and the
%% absolute home-directory path makes the test machine-specific.
test(testDescription4, [true(Error =:= 0.9797958971132711)]) :-
reset_Model_No_Train(perceptron),
open('/home/afkjakhes/eclipse-workspace/prolog-mlpack-libary/src/data_csv/iris2.csv', read, File),
take_csv_row(File, skipFirstRow,10, Data),
train(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, perceptron, 50, 0.0001, Error).
:- end_tests(predicate).
%% Entry point exported by this module: runs every plunit unit
%% defined in this file.
run_approx_kfn_tests :-
run_tests.
......@@ -77,7 +77,8 @@ void omega(float **omegaArr, SP_integer *omegaArrSize)
// input: const arma::mat &points,
// arma::rowvec &predictions
// output:
void predict(float *pointsMatArr, SP_integer pointsMatSize, SP_integer pointsMatRowNum, float **predictionsArr, SP_integer *predictionsArrSize)
void predict(float *pointsMatArr, SP_integer pointsMatSize, SP_integer pointsMatRowNum,
float **predictionsArr, SP_integer *predictionsArrSize)
{
// convert the Prolog array to arma::mat
mat points = convertArrayToMat(pointsMatArr, pointsMatSize, pointsMatRowNum);
......@@ -86,8 +87,16 @@ void predict(float *pointsMatArr, SP_integer pointsMatSize, SP_integer pointsMat
rowvec predictionsReturnVector;
try
{
// run the prediction and save the result in arma::rowvec
regressor.Predict(points, predictionsReturnVector);
}
catch(const std::exception& e)
{
raisePrologSystemExeption(e.what());
return;
}
// return the Vector
......@@ -98,7 +107,9 @@ void predict(float *pointsMatArr, SP_integer pointsMatSize, SP_integer pointsMat
// arma::rowvec &predictions,
// arma::rowvec &std
// output:
void predictWithStd(float *pointsMatArr, SP_integer pointsMatSize, SP_integer pointsMatRowNum, float **predictionsArr, SP_integer *predictionsArrSize, float **stdArr, SP_integer *stdArrSize)
void predictWithStd(float *pointsMatArr, SP_integer pointsMatSize, SP_integer pointsMatRowNum,
float **predictionsArr, SP_integer *predictionsArrSize,
float **stdArr, SP_integer *stdArrSize)
{
// convert the Prolog array to arma::mat
mat points = convertArrayToMat(pointsMatArr, pointsMatSize, pointsMatRowNum);
......@@ -108,8 +119,16 @@ void predictWithStd(float *pointsMatArr, SP_integer pointsMatSize, SP_integer po
rowvec stdReturnVector;
try
{
// run the prediction and save the result in arma::rowvec
regressor.Predict(points, predictionsReturnVector, stdReturnVector);
}
catch(const std::exception& e)
{
raisePrologSystemExeption(e.what());
return;
}
// return the Vectors
......@@ -128,7 +147,8 @@ double responsesOffset()
// input: const arma::mat & data,
// const arma::rowvec & responses
// output: double
double rmse(float *dataMatArr, SP_integer dataMatSize, SP_integer dataMatRowNum, float *responsesArr, SP_integer responsesArrSize)
double rmse(float *dataMatArr, SP_integer dataMatSize, SP_integer dataMatRowNum,
float *responsesArr, SP_integer responsesArrSize)
{
// convert the Prolog array to arma::mat
mat data = convertArrayToMat(dataMatArr, dataMatSize, dataMatRowNum);
......@@ -137,14 +157,23 @@ double rmse(float *dataMatArr, SP_integer dataMatSize, SP_integer dataMatRowNum,
rowvec responsesVector = convertArrayToRowvec(responsesArr, responsesArrSize);
try
{
// run the model function and return the error
return regressor.RMSE(data, responsesVector);
}
catch(const std::exception& e)
{
raisePrologSystemExeption(e.what());
return 0.0;
}
}
// input: const arma::mat & data,
// const arma::rowvec & responses
// output:
void train(float *dataMatArr, SP_integer dataMatSize, SP_integer dataMatRowNum, float *responsesArr, SP_integer responsesArrSize)
void train(float *dataMatArr, SP_integer dataMatSize, SP_integer dataMatRowNum,
float *responsesArr, SP_integer responsesArrSize)
{
if(dataMatSize / dataMatRowNum != responsesArrSize)
{
......@@ -158,9 +187,17 @@ void train(float *dataMatArr, SP_integer dataMatSize, SP_integer dataMatRowNum,
rowvec responsesVector = convertArrayToRowvec(responsesArr, responsesArrSize);
try
{
// run the model function
regressor.Train(data, responsesVector);
}
catch(const std::exception& e)
{
raisePrologSystemExeption(e.what());
return;
}
}
// input:
// output: double
......
:- module(bayesian_linear_regression_tests, [run_bayesian_linear_regression_tests/0]).
:- use_module(library(plunit)).
%%:- compile(bayesian_linear_regression).
......@@ -57,3 +60,37 @@ test(false_train4) :-
convert_list_to_float_array([0.2,0.2,0.2,0.2], array(Ysize, Y)),
train(X,Xsize, Xrownum,Y, Ysize).
:- end_tests(lists).
%%
%% TESTING predicate predicate/10
%%
:- begin_tests(predicate).
%% Failure Tests
test(testDescription, [error(domain_error('expectation' , culprit), _)]) :-
reset_Model_No_Train(perceptron),
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, culprit, 50, 0.0001, _).
test(testDescription2, [error(_,system_error('The values of the Label have to start at 0 and be >= 0 and < the given numClass!'))]) :-
reset_Model_No_Train(perceptron),
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,2], 2, perceptron, 50, 0.0001, _).
%% Successful Tests
test(testDescription3, [true(Error =:= 1)]) :-
reset_Model_No_Train(perceptron),
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, perceptron, 50, 0.0001, Error).
test(testDescription4, [true(Error =:= 0.9797958971132711)]) :-
reset_Model_No_Train(perceptron),
open('/home/afkjakhes/eclipse-workspace/prolog-mlpack-libary/src/data_csv/iris2.csv', read, File),
take_csv_row(File, skipFirstRow,10, Data),
train(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, perceptron, 50, 0.0001, Error).
:- end_tests(predicate).
%% Entry point exported by this module: runs every plunit unit
%% defined in this file.
run_bayesian_linear_regression_tests :-
run_tests.
......@@ -17,6 +17,7 @@ using namespace mlpack::tree;
// Global Variable of the HoeffdingTreeModel object so it can be accessed from all functions
HoeffdingTreeModel hoeffdingTreeObj;
bool isModelInitialized = false;
// input: const TreeType & type = GINI_HOEFFDING,
......@@ -48,19 +49,34 @@ void initAndBuildModel(char const *treeType,
// convert the Prolog arrays to arma::rowvec
Row< size_t > labelsVector = convertArrayToVec(labelsArr, labelsArrSize);
if (strcmp(treeType, "gini-hoeffding") == 0)
if (strcmp(treeType, "gini_hoeffding") == 0)
hoeffdingTreeObj = HoeffdingTreeModel(HoeffdingTreeModel::GINI_HOEFFDING);
else if (strcmp(treeType, "gini-binary") == 0)
else if (strcmp(treeType, "gini_binary") == 0)
hoeffdingTreeObj = HoeffdingTreeModel(HoeffdingTreeModel::GINI_BINARY);
else if (strcmp(treeType, "info-hoeffding") == 0)
else if (strcmp(treeType, "info_hoeffding") == 0)
hoeffdingTreeObj = HoeffdingTreeModel(HoeffdingTreeModel::INFO_HOEFFDING);
else if (strcmp(treeType, "info-binary") == 0)
else if (strcmp(treeType, "info_binary") == 0)
hoeffdingTreeObj = HoeffdingTreeModel(HoeffdingTreeModel::INFO_BINARY);
else
cout << "wrong treeType input" << endl;
{
raisePrologDomainExeption(treeType, 1, "The given TreeType is unkown!", "initAndBuildModel");
return;
}
try
{
hoeffdingTreeObj.BuildModel(data, data::DatasetInfo(data.n_rows), labelsVector, numClasses, (batchTraining == 1), successProbability, maxSamples, checkInterval, minSamples, bins, observationsBeforeBinning);
}
catch(const std::exception& e)
{
raisePrologSystemExeption(e.what());
isModelInitialized = false;
return;
}
isModelInitialized = true;
}
// input: const arma::mat & dataset,
......@@ -74,6 +90,12 @@ void classify(float *dataMatArr, SP_integer dataMatSize, SP_integer dataMatRowNu
float **predictArr, SP_integer *predictArrSize,
float **probsArr, SP_integer *probsArrSize)
{
if (!isModelInitialized)
{
raisePrologSystemExeption("The model is not initialized!");
return;
}
// convert the Prolog arrays to arma::mat
mat data = convertArrayToMat(dataMatArr, dataMatSize, dataMatRowNum);
......@@ -84,7 +106,15 @@ void classify(float *dataMatArr, SP_integer dataMatSize, SP_integer dataMatRowNu
rowvec probsReturnVector;
try
{
hoeffdingTreeObj.Classify(data, predictReturnVector, probsReturnVector);
}
catch(const std::exception& e)
{
raisePrologSystemExeption(e.what());
return;
}
// return the Vector
......@@ -106,11 +136,26 @@ void train(float *dataMatArr, SP_integer dataMatSize, SP_integer dataMatRowNum,
float *labelsArr, SP_integer labelsArrSize,
SP_integer batchTraining)
{
if (!isModelInitialized)
{
raisePrologSystemExeption("The model is not initialized!");
return;
}
// convert the Prolog arrays to arma::mat
mat data = convertArrayToMat(dataMatArr, dataMatSize, dataMatRowNum);
// convert the Prolog arrays to arma::rowvec
Row< size_t > labelsVector = convertArrayToVec(labelsArr, labelsArrSize);
try
{
hoeffdingTreeObj.Train(data, labelsVector, (batchTraining == 1));
}
catch(const std::exception& e)
{
raisePrologSystemExeption("nice");
return;
}
}
......@@ -20,7 +20,7 @@
%% --Input--
%% string treeType => "gini-hoeffding", "gini-binary", "info-hoeffding", "info-binary",
%% string treeType => "gini_hoeffding", "gini_binary", "info_hoeffding", "info_binary",
%% mat dataset,
%% vec labels,
%% int numClasses,
......@@ -43,6 +43,10 @@ initAndBuildModel(TreeType, DataList, DataRows, LabelsList, NumClasses, BatchTra
SuccessProbability >= 0,
SuccessProbability =< 1,
MaxSamples >= 0,
CheckInterval > 0,
MinSamples >= 0,
Bins >= 0,
ObservationsBeforeBinning >= 0,
convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)),
convert_list_to_float_array(LabelsList, array(Ysize, Y)),
initAndBuildModelI(TreeType, X, Xsize, Xrownum, Y, Ysize, NumClasses, BatchTraining, SuccessProbability, MaxSamples, CheckInterval, MinSamples, Bins, ObservationsBeforeBinning).
......
......@@ -7,7 +7,9 @@
:- use_module('../../helper_files/helper.pl').
reset_Model :-
initAndBuildModel(1,0,50,0.0001).
open('/home/afkjakhes/eclipse-workspace/prolog-mlpack-libary/src/data_csv/iris2.csv', read, File),
take_csv_row(File, skipFirstRow,10, Data),
initAndBuildModel(gini_binary, Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0, 0.95, 5000, 100, 100, 10, 100).
%%
%% TESTING predicate initAndBuildModel/12
......@@ -16,26 +18,83 @@ reset_Model :-
%% Failure Tests
test(testDescription, [error(domain_error('expectation' , culprit), _)]) :-
reset_Model_No_Train(perceptron),
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, culprit, 50, 0.0001, _).
test(hoeffding_Init_Classify_Befor_Init, [error(_,system_error('The model is not initialized!'))]) :-
classify([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, _, _).
test(hoeffding_Init_Wrong_TreeType, [error(domain_error('The given TreeType is unkown!' , wrongType), _)]) :-
initAndBuildModel(wrongType, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, 5000, 100, 100, 10, 100).
test(hoeffding_Init_Negative_NumClass, fail) :-
initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], -2, 0, 0.95, 5000, 100, 100, 10, 100).
test(hoeffding_Init_Bad_SuccessProbability, fail) :-
initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, -1.0, 5000, 100, 100, 10, 100),
initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 2.0, 5000, 100, 100, 10, 100).
test(hoeffding_Init_Negative_MaxSamples, fail) :-
initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, -5000, 100, 100, 10, 100).
test(hoeffding_Init_Negative_CheckInterval, fail) :-
initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, 5000, -100, 100, 10, 100).
test(hoeffding_Init_Negative_MinSamples, fail) :-
initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, 5000, 100, -100, 10, 100).
test(hoeffding_Init_Negative_Bins, fail) :-
initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, 5000, 100, 100, -10, 100).
test(hoeffding_Init_Negative_ObservationsBeforeBinning, fail) :-
initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, 5000, 100, 100, 10, -100).
test(testDescription2, [error(_,system_error('The values of the Label have to start at 0 and be >= 0 and < the given numClass!'))]) :-
reset_Model_No_Train(perceptron),
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,2], 2, perceptron, 50, 0.0001, _).
test(hoeffding_Init_With_Wrong_Label_Dims1, [error(_,system_error('Labels Vector is too short or its values are incorrect: should fit into [0,numClasses)!'))]) :-
initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], 2, 0, 0.95, 5000, 100, 100, 10, 100).
%% If the label vector is too long it seems to cause no problems
test(hoeffding_Init_With_Wrong_Label_Dims2) :-
initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1,0,0,1], 2, 0, 0.95, 5000, 100, 100, 10, 100).
%% The same when the label values are out of range
test(hoeffding_Init_With_Wrong_Label_Value) :-
initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,-1,0,-1], 2, 0, 0.95, 5000, 100, 100, 10, 100).
test(hoeffding_Init_With_Too_Many_Label_Value, [error(_,system_error('The values of the Label have to start at 0 and be >= 0 and < the given numClass!'))]) :-
initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [1,1,0,2], 2, 0, 0.95, 5000, 100, 100, 10, 100).
%% Successful Tests
test(testDescription3, [true(Error =:= 1)]) :-
reset_Model_No_Train(perceptron),
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, perceptron, 50, 0.0001, Error).
test(hoeffding_Init_GiniHoeffding_Direkt_Input) :-
initAndBuildModel(gini_hoeffding, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, 5000, 100, 100, 10, 100).
test(hoeffding_Init_GiniHoeffding_CSV_Input) :-
open('/home/afkjakhes/eclipse-workspace/prolog-mlpack-libary/src/data_csv/iris2.csv', read, File),
take_csv_row(File, skipFirstRow,10, Data),
initAndBuildModel(gini_hoeffding, Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0, 0.95, 5000, 100, 100, 10, 100).
test(hoeffding_Init_GiniBinary_Direkt_Input) :-
initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, 5000, 100, 100, 10, 100).
test(hoeffding_Init_GiniBinary_CSV_Input) :-
open('/home/afkjakhes/eclipse-workspace/prolog-mlpack-libary/src/data_csv/iris2.csv', read, File),
take_csv_row(File, skipFirstRow,10, Data),
initAndBuildModel(gini_binary, Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0, 0.95, 5000, 100, 100, 10, 100).
test(hoeffding_Init_InfoHoeffding_Direkt_Input) :-
initAndBuildModel(info_hoeffding, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, 5000, 100, 100, 10, 100).
test(hoeffding_Init_InfoHoeffding_CSV_Input) :-
open('/home/afkjakhes/eclipse-workspace/prolog-mlpack-libary/src/data_csv/iris2.csv', read, File),
take_csv_row(File, skipFirstRow,10, Data),
initAndBuildModel(info_hoeffding, Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0, 0.95, 5000, 100, 100, 10, 100).
test(hoeffding_Init_InfoBinary_Direkt_Input) :-
initAndBuildModel(info_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, 5000, 100, 100, 10, 100).
test(testDescription4, [true(Error =:= 0.9797958971132711)]) :-
reset_Model_No_Train(perceptron),
test(hoeffding_Init_InfoBinary_CSV_Input) :-
open('/home/afkjakhes/eclipse-workspace/prolog-mlpack-libary/src/data_csv/iris2.csv', read, File),
take_csv_row(File, skipFirstRow,10, Data),
train(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, perceptron, 50, 0.0001, Error).
initAndBuildModel(info_binary, Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0, 0.95, 5000, 100, 100, 10, 100).
:- end_tests(initAndBuildModel).
......@@ -47,26 +106,56 @@ test(testDescription4, [true(Error =:= 0.9797958971132711)]) :-
%% Failure Tests
test(testDescription, [error(domain_error('expectation' , culprit), _)]) :-
reset_Model_No_Train(perceptron),
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, culprit, 50, 0.0001, _).
test(testDescription2, [error(_,system_error('The values of the Label have to start at 0 and be >= 0 and < the given numClass!'))]) :-
reset_Model_No_Train(perceptron),
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,2], 2, perceptron, 50, 0.0001, _).
test(hoeffding_Classify_Different_Dims_To_Train, [error(_,system_error('Labels Vector is too short or its values are incorrect: should fit into [0,numClasses)!'))]) :-
reset_Model,
classify([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, PredicList, ProbsList),
print('\nPredictions: '),
print(PredicList),
print('\nProbabilities: '),
print(ProbsList).
%% Successful Tests
test(testDescription3, [true(Error =:= 1)]) :-
reset_Model_No_Train(perceptron),
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, perceptron, 50, 0.0001, Error).
test(testDescription4, [true(Error =:= 0.9797958971132711)]) :-
reset_Model_No_Train(perceptron),
test(hoeffding_Classify_GiniHoeffding) :-
open('/home/afkjakhes/eclipse-workspace/prolog-mlpack-libary/src/data_csv/iris2.csv', read, File),
take_csv_row(File, skipFirstRow,10, Data),
initAndBuildModel(gini_hoeffding, Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0, 0.95, 5000, 100, 100, 10, 100),
classify([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, PredicList, ProbsList),
print('\nPredictions: '),
print(PredicList),
print('\nProbabilities: '),
print(ProbsList).
test(hoeffding_Classify_GiniBinary) :-
open('/home/afkjakhes/eclipse-workspace/prolog-mlpack-libary/src/data_csv/iris2.csv', read, File),
take_csv_row(File, skipFirstRow,10, Data),
train(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, perceptron, 50, 0.0001, Error).
initAndBuildModel(gini_binary, Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0, 0.95, 5000, 100, 100, 10, 100),
classify([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, PredicList, ProbsList),
print('\nPredictions: '),
print(PredicList),
print('\nProbabilities: '),
print(ProbsList).
test(hoeffding_Classify_InfoHoeffding) :-
open('/home/afkjakhes/eclipse-workspace/prolog-mlpack-libary/src/data_csv/iris2.csv', read, File),
take_csv_row(File, skipFirstRow,10, Data),
initAndBuildModel(info_hoeffding, Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0, 0.95, 5000, 100, 100, 10, 100),
classify([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, PredicList, ProbsList),
print('\nPredictions: '),
print(PredicList),
print('\nProbabilities: '),
print(ProbsList).
test(hoeffding_Classify_InfoBinary) :-
open('/home/afkjakhes/eclipse-workspace/prolog-mlpack-libary/src/data_csv/iris2.csv', read, File),
take_csv_row(File, skipFirstRow,10, Data),
initAndBuildModel(info_binary, Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0, 0.95, 5000, 100, 100, 10, 100),
classify([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, PredicList, ProbsList),
print('\nPredictions: '),
print(PredicList),
print('\nProbabilities: '),
print(ProbsList).
:- end_tests(classify).
......@@ -78,29 +167,38 @@ test(testDescription4, [true(Error =:= 0.9797958971132711)]) :-
%% Failure Tests
test(testDescription, [error(domain_error('expectation' , culprit), _)]) :-
reset_Model_No_Train(perceptron),
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, culprit, 50, 0.0001, _).
test(hoeffding_Train_With_Wrong_Label_Dims1, [error(_,system_error('Labels Vector is too short or its values are incorrect: should fit into [0,numClasses)!'))]) :-
reset_Model,
train([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, [0,1,0,0,0], 0).
test(testDescription2, [error(_,system_error('The values of the Label have to start at 0 and be >= 0 and < the given numClass!'))]) :-
reset_Model_No_Train(perceptron),
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,2], 2, perceptron, 50, 0.0001, _).
%% If the label vector is too long it seems to cause no problems
test(hoeffding_Train_With_Wrong_Label_Dims2) :-
reset_Model,
train([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, [0,1,0,1,0,0,1], 0).
%% The same when the label values are out of range
test(hoeffding_Train_With_Wrong_Label_Value) :-
reset_Model,
train([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, [0,-1,0,-1], 0).
%% Successful Tests
test(hoeffding_Train_With_Too_Many_Label_Value, [error(_,system_error('The values of the Label have to start at 0 and be >= 0 and < the given numClass!'))]) :-
reset_Model,
train([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, [1,1,0,2], 0).
test(testDescription3, [true(Error =:= 1)]) :-
reset_Model_No_Train(perceptron),
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, perceptron, 50, 0.0001, Error).
test(hoeffding_Train_Bad_Data_Dims) :-
reset_Model,
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 0).
test(testDescription4, [true(Error =:= 0.9797958971132711)]) :-
reset_Model_No_Train(perceptron),
open('/home/afkjakhes/eclipse-workspace/prolog-mlpack-libary/src/data_csv/iris2.csv', read, File),
take_csv_row(File, skipFirstRow,10, Data),
train(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, perceptron, 50, 0.0001, Error).
%% Successful Tests
test(testDescription3) :-
reset_Model,
train([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, [0,0,1,1,0], 1),
train([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, [0,0,1,1,0], 0).
:- end_tests(train).
run_hoeffding_tree_tests :-
run_tests.
run_tests(train).
......@@ -67,21 +67,22 @@ void initAndBuildModel(double bandwidth, double relError, double absError,
}
else
{
raisePrologSystemExeption("The given KernelType is unknown!");
raisePrologDomainExeption(kernelType, 4, "The given KernelType is unknown!", "initAndBuildModel");
return;
}
// select the treeType
mlpack::kde::KDEModel::TreeTypes treeT;
if (strcmp(treeType, "kd-tree") == 0)
if (strcmp(treeType, "kd_tree") == 0)
{
treeT = KDEModel::KD_TREE;
}
else if (strcmp(treeType, "ball-tree") == 0)
else if (strcmp(treeType, "ball_tree") == 0)
{
treeT = KDEModel::BALL_TREE;
}
else if (strcmp(treeType, "cover-tree") == 0)
else if (strcmp(treeType, "cover_tree") == 0)
{
treeT = KDEModel::COVER_TREE;
}
......@@ -89,42 +90,43 @@ void initAndBuildModel(double bandwidth, double relError, double absError,
{
treeT = KDEModel::OCTREE;
}
else if (strcmp(treeType, "r-tree") == 0)
else if (strcmp(treeType, "r_tree") == 0)
{
treeT = KDEModel::R_TREE;
}
else
{
raisePrologSystemExeption("The given TreeType is unknown!");
raisePrologDomainExeption(treeType, 5, "The given TreeType is unknown!", "initAndBuildModel");
return;
}
try
{
kdeModel = KDEModel(bandwidth, relError, absError, kernelT, treeT, (monteCarlo == 1), mcProb, initialSampleSize, mcEntryCoef, mcBreakCoef);
kdeModel.BuildModel(move(reference));
}
catch(const std::exception& e)
{
raisePrologSystemExeption(e.what());
return;
}
if (strcmp(algorithm, "dual-tree") == 0)
if (strcmp(algorithm, "dual_tree") == 0)
{
KDEMode& mode = kdeModel.Mode();
mode = KDEMode::DUAL_TREE_MODE;
}
else if (strcmp(algorithm, "single-tree") == 0)
else if (strcmp(algorithm, "single_tree") == 0)
{
KDEMode& mode = kdeModel.Mode();
mode = KDEMode::SINGLE_TREE_MODE;
}
else
{
raisePrologSystemExeption("The given Algorithm is unknown!");
}
try
{
kdeModel.BuildModel(move(reference));
}
catch(const std::exception& e)
{
raisePrologSystemExeption(e.what());
raisePrologDomainExeption(algorithm, 6, "The given Algorithm is unknown!", "initAndBuildModel");
return;
}
}
......@@ -151,6 +153,7 @@ void evaluateWithQuery(float *querySetMatArr, SP_integer querySetMatSize, SP_int
catch(const std::exception& e)
{
raisePrologSystemExeption(e.what());
return;
}
......@@ -178,6 +181,7 @@ void evaluateNoQuery(float **estimationsArr, SP_integer *estimationsArrSize)
catch(const std::exception& e)
{
raisePrologSystemExeption(e.what());
return;
}
......
......@@ -7,98 +7,295 @@
:- use_module('../../helper_files/helper.pl').
reset_Model :-
initModel(1,0,50,0.0001).
initAndBuildModel(1.0, 0.05, 0.0, gaussian, kd_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
%%
%% TESTING predicate initAndBuildModel/10
%% TESTING predicate initAndBuildModel/13
%%
:- begin_tests(initAndBuildModel).
%% Failure Tests
test(testDescription, [error(domain_error('expectation' , culprit), _)]) :-
reset_Model_No_Train(perceptron),
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, culprit, 50, 0.0001, _).
%% only raises an exception if the model hasn't been initialized once, so only after fully resetting the tests.
test(kde_EvaluateWithQuery_Befor_Init, [error(_, system_error('no KDE model initialized'))]) :-
evaluateWithQuery([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, _).
test(testDescription2, [error(_,system_error('The values of the Label have to start at 0 and be >= 0 and < the given numClass!'))]) :-
reset_Model_No_Train(perceptron),
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,2], 2, perceptron, 50, 0.0001, _).
%% only raises an exception if the model hasn't been initialized once, so only after fully resetting the tests.
test(kde_EvaluateNoQuery_Befor_Init, [error(_, system_error('no KDE model initialized'))]) :-
evaluateNoQuery(_).
test(kde_Init_Negative_Bandwidth, fail) :-
initAndBuildModel(-1.0, 0.05, 0.0, gaussian, kd_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
%% Successful Tests
test(kde_Init_Bad_RelError, fail) :-
initAndBuildModel(1.0, -0.05, 0.0, gaussian, kd_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
initAndBuildModel(1.0, 1.05, 0.0, gaussian, kd_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
test(kde_Init_Negative_AbsError, fail) :-
initAndBuildModel(1.0, 0.05, -1.0, gaussian, kd_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
%% Unknown atoms for kernel / tree / algorithm must raise a domain_error.
test(kde_Init_Wrong_KernelType, [error(domain_error('The given KernelType is unknown!' , wrongType), _)]) :-
initAndBuildModel(1.0, 0.05, 0.0, wrongType, kd_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
test(kde_Init_Wrong_TreeType, [error(domain_error('The given TreeType is unknown!' , wrongType), _)]) :-
initAndBuildModel(1.0, 0.05, 0.0, gaussian, wrongType, dual_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
test(kde_Init_Wrong_Algorithm, [error(domain_error('The given Algorithm is unknown!' , wrongType), _)]) :-
initAndBuildModel(1.0, 0.05, 0.0, gaussian, kd_tree, wrongType, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
%% The Monte-Carlo probability must lie in [0,1]; both out-of-range values fail.
test(kde_Init_Bad_McProb, fail) :-
initAndBuildModel(1.0, 0.05, 0.0, gaussian, kd_tree, dual_tree, 0, -0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
initAndBuildModel(1.0, 0.05, 0.0, gaussian, kd_tree, dual_tree, 0, 1.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
%% A negative initial Monte-Carlo sample size must be rejected.
test(kde_Init_Negative_InitialSampleSize, fail) :-
initAndBuildModel(1.0, 0.05, 0.0, gaussian, kd_tree, dual_tree, 0, 0.95, -100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
%% 0.0 is not a valid Monte-Carlo entry coefficient.
test(kde_Init_Negative_MCEntryCoef, fail) :-
initAndBuildModel(1.0, 0.05, 0.0, gaussian, kd_tree, dual_tree, 0, 0.95, 100, 0.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
%% The Monte-Carlo break coefficient must lie in (0,1); both out-of-range values fail.
test(kde_Init_Bad_MCBreakCoef, fail) :-
initAndBuildModel(1.0, 0.05, 0.0, gaussian, kd_tree, dual_tree, 0, 0.95, 100, 3.0, -0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
initAndBuildModel(1.0, 0.05, 0.0, gaussian, kd_tree, dual_tree, 0, 0.95, 100, 3.0, 1.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
%% NOTE(review): this looks like a leftover from the perceptron test
%% template — reset_Model_No_Train/1 and train/8 are not part of the KDE
%% module, so this test cannot pass here. Confirm and remove.
test(testDescription3, [true(Error =:= 1)]) :-
reset_Model_No_Train(perceptron),
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, perceptron, 50, 0.0001, Error).
%% Successful Tests
%% Gaussian Kernel
%% Builds the KDE model with a gaussian kernel on a kd-tree, once with the
%% dual-tree and once with the single-tree traversal algorithm.
%% (Removed a dangling leftover clause head from the perceptron template,
%% `test(testDescription4, ...) :- reset_Model_No_Train(perceptron),` —
%% it ended in a comma and fused with this clause, breaking the file.)
test(kde_Init_Gaussian_KDTREE) :-
    initAndBuildModel(1.0, 0.05, 0.0, gaussian, kd_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
        [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
    initAndBuildModel(1.0, 0.05, 0.0, gaussian, kd_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
        [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
%% Each kernel type below is exercised on every tree type, once with the
%% dual-tree and once with the single-tree traversal algorithm.
test(kde_Init_Gaussian_BALLTREE) :-
initAndBuildModel(1.0, 0.05, 0.0, gaussian, ball_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
initAndBuildModel(1.0, 0.05, 0.0, gaussian, ball_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
test(kde_Init_Gaussian_COVERTREE) :-
initAndBuildModel(1.0, 0.05, 0.0, gaussian, cover_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
initAndBuildModel(1.0, 0.05, 0.0, gaussian, cover_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
test(kde_Init_Gaussian_OCTREE) :-
initAndBuildModel(1.0, 0.05, 0.0, gaussian, octree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
initAndBuildModel(1.0, 0.05, 0.0, gaussian, octree, single_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
test(kde_Init_Gaussian_RTREE) :-
initAndBuildModel(1.0, 0.05, 0.0, gaussian, r_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
initAndBuildModel(1.0, 0.05, 0.0, gaussian, r_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
%% Epanechnikov Kernel
test(kde_Init_Epanechnikov_KDTREE) :-
initAndBuildModel(1.0, 0.05, 0.0, epanechnikov, kd_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
initAndBuildModel(1.0, 0.05, 0.0, epanechnikov, kd_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
test(kde_Init_Epanechnikov_BALLTREE) :-
initAndBuildModel(1.0, 0.05, 0.0, epanechnikov, ball_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
initAndBuildModel(1.0, 0.05, 0.0, epanechnikov, ball_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
test(kde_Init_Epanechnikov_COVERTREE) :-
initAndBuildModel(1.0, 0.05, 0.0, epanechnikov, cover_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
initAndBuildModel(1.0, 0.05, 0.0, epanechnikov, cover_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
test(kde_Init_Epanechnikov_OCTREE) :-
initAndBuildModel(1.0, 0.05, 0.0, epanechnikov, octree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
initAndBuildModel(1.0, 0.05, 0.0, epanechnikov, octree, single_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
test(kde_Init_Epanechnikov_RTREE) :-
initAndBuildModel(1.0, 0.05, 0.0, epanechnikov, r_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
initAndBuildModel(1.0, 0.05, 0.0, epanechnikov, r_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
%% Laplacian Kernel
test(kde_Init_Laplacian_KDTREE) :-
initAndBuildModel(1.0, 0.05, 0.0, laplacian, kd_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
initAndBuildModel(1.0, 0.05, 0.0, laplacian, kd_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
test(kde_Init_Laplacian_BALLTREE) :-
initAndBuildModel(1.0, 0.05, 0.0, laplacian, ball_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
initAndBuildModel(1.0, 0.05, 0.0, laplacian, ball_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
test(kde_Init_Laplacian_COVERTREE) :-
initAndBuildModel(1.0, 0.05, 0.0, laplacian, cover_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
initAndBuildModel(1.0, 0.05, 0.0, laplacian, cover_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
test(kde_Init_Laplacian_OCTREE) :-
initAndBuildModel(1.0, 0.05, 0.0, laplacian, octree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
initAndBuildModel(1.0, 0.05, 0.0, laplacian, octree, single_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
test(kde_Init_Laplacian_RTREE) :-
initAndBuildModel(1.0, 0.05, 0.0, laplacian, r_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
initAndBuildModel(1.0, 0.05, 0.0, laplacian, r_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
%% Spherical Kernel
test(kde_Init_Spherical_KDTREE) :-
initAndBuildModel(1.0, 0.05, 0.0, spherical, kd_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
initAndBuildModel(1.0, 0.05, 0.0, spherical, kd_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
test(kde_Init_Spherical_BALLTREE) :-
initAndBuildModel(1.0, 0.05, 0.0, spherical, ball_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
initAndBuildModel(1.0, 0.05, 0.0, spherical, ball_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
test(kde_Init_Spherical_COVERTREE) :-
initAndBuildModel(1.0, 0.05, 0.0, spherical, cover_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
initAndBuildModel(1.0, 0.05, 0.0, spherical, cover_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
test(kde_Init_Spherical_OCTREE) :-
initAndBuildModel(1.0, 0.05, 0.0, spherical, octree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
initAndBuildModel(1.0, 0.05, 0.0, spherical, octree, single_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
test(kde_Init_Spherical_RTREE) :-
initAndBuildModel(1.0, 0.05, 0.0, spherical, r_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
initAndBuildModel(1.0, 0.05, 0.0, spherical, r_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
%% Triangular Kernel
test(kde_Init_Triangular_KDTREE) :-
initAndBuildModel(1.0, 0.05, 0.0, triangular, kd_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
initAndBuildModel(1.0, 0.05, 0.0, triangular, kd_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
test(kde_Init_Triangular_BALLTREE) :-
initAndBuildModel(1.0, 0.05, 0.0, triangular, ball_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
initAndBuildModel(1.0, 0.05, 0.0, triangular, ball_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
test(kde_Init_Triangular_COVERTREE) :-
initAndBuildModel(1.0, 0.05, 0.0, triangular, cover_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
initAndBuildModel(1.0, 0.05, 0.0, triangular, cover_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
test(kde_Init_Triangular_OCTREE) :-
initAndBuildModel(1.0, 0.05, 0.0, triangular, octree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
initAndBuildModel(1.0, 0.05, 0.0, triangular, octree, single_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
test(kde_Init_Triangular_RTREE) :-
initAndBuildModel(1.0, 0.05, 0.0, triangular, r_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
initAndBuildModel(1.0, 0.05, 0.0, triangular, r_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
%% Builds the KDE model from data read out of a csv file.
%% (Fixed diff-mangled clause: a leftover `train/8` goal from the
%% perceptron template terminated the clause early, leaving the
%% `initAndBuildModel/13` goal as a stray top-level clause with an
%% unbound Data variable.)
test(kde_Init_CSV_Input) :-
    open('/home/afkjakhes/eclipse-workspace/prolog-mlpack-libary/src/data_csv/iris2.csv', read, File),
    take_csv_row(File, skipFirstRow, 10, Data),
    initAndBuildModel(2.2, 0.25, 0.5, gaussian, kd_tree, dual_tree, 1, 0.75, 50, 2.0, 0.2,
        Data, 4).
:- end_tests(initAndBuildModel).
%%
%% TESTING predicate evaluateWithQuery/3
%%
:- begin_tests(evaluateWithQuery).
%% Failure Tests
%% NOTE(review): testDescription and testDescription2 below look like
%% leftovers from the perceptron test template — reset_Model_No_Train/1
%% and train/8 are not part of the KDE module. Confirm and remove.
test(testDescription, [error(domain_error('expectation' , culprit), _)]) :-
reset_Model_No_Train(perceptron),
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, culprit, 50, 0.0001, _).
test(testDescription2, [error(_,system_error('The values of the Label have to start at 0 and be >= 0 and < the given numClass!'))]) :-
reset_Model_No_Train(perceptron),
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,2], 2, perceptron, 50, 0.0001, _).
%% A query set whose dimensionality (4) differs from the reference set's
%% must raise a system_error.
test(kde_EvalWithQuery_Wrong_Query_Dims, [error(_, system_error('cannot evaluate KDE model: querySet and referenceSet dimensions don\'t match'))]) :-
reset_Model,
evaluateWithQuery([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, Estimation),
print('\nEstimation: '),
print(Estimation).
%% Successful Tests
%% NOTE(review): testDescription3 looks like a leftover from the
%% perceptron test template — confirm and remove.
test(testDescription3, [true(Error =:= 1)]) :-
reset_Model_No_Train(perceptron),
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, perceptron, 50, 0.0001, Error).
%% Evaluates the model on a query set with matching dimensionality (3)
%% and prints the resulting density estimations.
test(kde_EvalWithQuery) :-
reset_Model,
evaluateWithQuery([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5], 3, Estimation),
print('\nEstimation: '),
print(Estimation).
%% Builds the model from csv data and evaluates it on a query set with
%% the same dimensionality (4), printing the resulting estimations.
%% (Fixed diff-mangled clause: a dangling `test(testDescription4, ...)`
%% head and a leftover `train/8` goal from the perceptron template cut
%% this clause in two and orphaned the evaluateWithQuery/3 goal.)
test(kde_EvalWithQuery_CSV_Input) :-
    open('/home/afkjakhes/eclipse-workspace/prolog-mlpack-libary/src/data_csv/iris2.csv', read, File),
    take_csv_row(File, skipFirstRow, 10, Data),
    initAndBuildModel(2.2, 0.25, 0.5, gaussian, kd_tree, dual_tree, 1, 0.75, 50, 2.0, 0.2,
        Data, 4),
    evaluateWithQuery([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, Estimation),
    print('\nEstimation: '),
    print(Estimation).
:- end_tests(evaluateWithQuery).
%%
%% TESTING predicate evaluateNoQuery/1
%%
:- begin_tests(evaluateNoQuery).
%% Failure Tests
%% NOTE(review): the testDescription* tests below look like leftovers
%% from the perceptron test template — reset_Model_No_Train/1 and
%% train/8 are not part of the KDE module. Confirm and remove.
test(testDescription, [error(domain_error('expectation' , culprit), _)]) :-
reset_Model_No_Train(perceptron),
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, culprit, 50, 0.0001, _).
test(testDescription2, [error(_,system_error('The values of the Label have to start at 0 and be >= 0 and < the given numClass!'))]) :-
reset_Model_No_Train(perceptron),
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,2], 2, perceptron, 50, 0.0001, _).
%% Successful Tests
test(testDescription3, [true(Error =:= 1)]) :-
reset_Model_No_Train(perceptron),
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, perceptron, 50, 0.0001, Error).
%% NOTE(review): this clause mixes the perceptron template (train/8,
%% reset_Model_No_Train/1) with csv reading and does not belong in the
%% KDE test module — confirm and remove.
test(testDescription4, [true(Error =:= 0.9797958971132711)]) :-
reset_Model_No_Train(perceptron),
open('/home/afkjakhes/eclipse-workspace/prolog-mlpack-libary/src/data_csv/iris2.csv', read, File),
take_csv_row(File, skipFirstRow,10, Data),
train(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, perceptron, 50, 0.0001, Error).
%% Evaluates the model on its own reference set (no separate query set)
%% and prints the resulting density estimations.
test(kde_EvalNoQuery) :-
reset_Model,
evaluateNoQuery(Estimation),
print('\nEstimation: '),
print(Estimation).
:- end_tests(evaluateNoQuery).
......
......@@ -23,7 +23,7 @@ using namespace mlpack::kernel;
template<typename KernelType>
void useNystroemKernelPCA(char const *nystroemMethod, KernelType kernel, bool centerTranformedData, mat data, mat transformedDataReturnMat, vec eigvalReturnVector, mat eigvecReturnMat, size_t returnDim)
bool useNystroemKernelPCA(char const *nystroemMethod, KernelType kernel, bool centerTranformedData, mat data, mat transformedDataReturnMat, vec eigvalReturnVector, mat eigvecReturnMat, size_t returnDim)
{
if (strcmp(nystroemMethod, "kmeans") == 0)
{
......@@ -42,8 +42,10 @@ void useNystroemKernelPCA(char const *nystroemMethod, KernelType kernel, bool ce
}
else
{
raisePrologDomainExeption(nystroemMethod, 4, "The given NystoemMethod is unkown!", "kernel_pca");
raisePrologDomainExeption(nystroemMethod, 4, "The given NystroemMethod is unknown!", "kernel_pca");
return false;
}
return true;
}
// TODO:
......@@ -62,7 +64,7 @@ void kernel_pca(char const *kernel, SP_integer centerTranformedData, SP_integer
float **transformedDataMatArr, SP_integer *transformedDataMatColNum, SP_integer *transformedDataMatRowNum,
float **eigvalArr, SP_integer *eigvalArrSize,
float **eigvecMatArr, SP_integer *eigvecMatColNum, SP_integer *eigvecMatRowNum,
SP_integer *newDimension)
SP_integer newDimension)
{
// convert the Prolog array to arma::mat
mat data = convertArrayToMat(dataMatArr, dataMatSize, dataMatRowNum);
......@@ -75,20 +77,19 @@ void kernel_pca(char const *kernel, SP_integer centerTranformedData, SP_integer
// create the ReturnMat
mat eigvecReturnMat;
size_t returnDim = 0;
if (strcmp(kernel, "linear") == 0)
{
LinearKernel lk;
if(useNystroem == 1)
{
useNystroemKernelPCA<LinearKernel>(nystroemMethod, lk, (centerTranformedData == 1), data, transformedDataReturnMat, eigvalReturnVector, eigvecReturnMat, returnDim);
if(!useNystroemKernelPCA<LinearKernel>(nystroemMethod, lk, (centerTranformedData == 1), data, transformedDataReturnMat, eigvalReturnVector, eigvecReturnMat, newDimension))
return;
}
else
{
KernelPCA<LinearKernel, NaiveKernelRule<LinearKernel>>(lk, (centerTranformedData == 1))
.Apply(data, transformedDataReturnMat, eigvalReturnVector, eigvecReturnMat, returnDim);
KernelPCA<LinearKernel>(lk, (centerTranformedData == 1))
.Apply(data, transformedDataReturnMat, eigvalReturnVector, eigvecReturnMat, newDimension);
}
}
else if (strcmp(kernel, "polynomial") == 0)
......@@ -96,12 +97,13 @@ void kernel_pca(char const *kernel, SP_integer centerTranformedData, SP_integer
PolynomialKernel pk(degree, offset);
if(useNystroem == 1)
{
useNystroemKernelPCA<PolynomialKernel>(nystroemMethod, pk, (centerTranformedData == 1), data, transformedDataReturnMat, eigvalReturnVector, eigvecReturnMat, returnDim);
if(!useNystroemKernelPCA<PolynomialKernel>(nystroemMethod, pk, (centerTranformedData == 1), data, transformedDataReturnMat, eigvalReturnVector, eigvecReturnMat, newDimension))
return;
}
else
{
KernelPCA<PolynomialKernel, NaiveKernelRule<PolynomialKernel>>(pk, (centerTranformedData == 1))
.Apply(data, transformedDataReturnMat, eigvalReturnVector, eigvecReturnMat, returnDim);
KernelPCA<PolynomialKernel>(pk, (centerTranformedData == 1))
.Apply(data, transformedDataReturnMat, eigvalReturnVector, eigvecReturnMat, newDimension);
}
}
else if (strcmp(kernel, "cosine") == 0)
......@@ -109,12 +111,13 @@ void kernel_pca(char const *kernel, SP_integer centerTranformedData, SP_integer
CosineDistance cd;
if(useNystroem == 1)
{
useNystroemKernelPCA<CosineDistance>(nystroemMethod, cd, (centerTranformedData == 1), data, transformedDataReturnMat, eigvalReturnVector, eigvecReturnMat, returnDim);
if(!useNystroemKernelPCA<CosineDistance>(nystroemMethod, cd, (centerTranformedData == 1), data, transformedDataReturnMat, eigvalReturnVector, eigvecReturnMat, newDimension))
return;
}
else
{
KernelPCA<CosineDistance, NaiveKernelRule<CosineDistance>>(cd, (centerTranformedData == 1))
.Apply(data, transformedDataReturnMat, eigvalReturnVector, eigvecReturnMat, returnDim);
KernelPCA<CosineDistance>(cd, (centerTranformedData == 1))
.Apply(data, transformedDataReturnMat, eigvalReturnVector, eigvecReturnMat, newDimension);
}
}
else if (strcmp(kernel, "gaussian") == 0)
......@@ -122,12 +125,13 @@ void kernel_pca(char const *kernel, SP_integer centerTranformedData, SP_integer
GaussianKernel gk(bandwidth);
if(useNystroem == 1)
{
useNystroemKernelPCA<GaussianKernel>(nystroemMethod, gk, (centerTranformedData == 1), data, transformedDataReturnMat, eigvalReturnVector, eigvecReturnMat, returnDim);
if(!useNystroemKernelPCA<GaussianKernel>(nystroemMethod, gk, (centerTranformedData == 1), data, transformedDataReturnMat, eigvalReturnVector, eigvecReturnMat, newDimension))
return;
}
else
{
KernelPCA<GaussianKernel, NaiveKernelRule<GaussianKernel>>(gk, (centerTranformedData == 1))
.Apply(data, transformedDataReturnMat, eigvalReturnVector, eigvecReturnMat, returnDim);
KernelPCA<GaussianKernel>(gk, (centerTranformedData == 1))
.Apply(data, transformedDataReturnMat, eigvalReturnVector, eigvecReturnMat, newDimension);
}
}
else if (strcmp(kernel, "epanechnikov") == 0)
......@@ -135,12 +139,13 @@ void kernel_pca(char const *kernel, SP_integer centerTranformedData, SP_integer
EpanechnikovKernel ek(bandwidth);
if(useNystroem == 1)
{
useNystroemKernelPCA<EpanechnikovKernel>(nystroemMethod, ek, (centerTranformedData == 1), data, transformedDataReturnMat, eigvalReturnVector, eigvecReturnMat, returnDim);
if(!useNystroemKernelPCA<EpanechnikovKernel>(nystroemMethod, ek, (centerTranformedData == 1), data, transformedDataReturnMat, eigvalReturnVector, eigvecReturnMat, newDimension))
return;
}
else
{
KernelPCA<EpanechnikovKernel, NaiveKernelRule<EpanechnikovKernel>>(ek, (centerTranformedData == 1))
.Apply(data, transformedDataReturnMat, eigvalReturnVector, eigvecReturnMat, returnDim);
KernelPCA<EpanechnikovKernel>(ek, (centerTranformedData == 1))
.Apply(data, transformedDataReturnMat, eigvalReturnVector, eigvecReturnMat, newDimension);
}
}
else if (strcmp(kernel, "laplacian") == 0)
......@@ -148,12 +153,13 @@ void kernel_pca(char const *kernel, SP_integer centerTranformedData, SP_integer
LaplacianKernel tk(bandwidth);
if(useNystroem == 1)
{
useNystroemKernelPCA<LaplacianKernel>(nystroemMethod, tk, (centerTranformedData == 1), data, transformedDataReturnMat, eigvalReturnVector, eigvecReturnMat, returnDim);
if(!useNystroemKernelPCA<LaplacianKernel>(nystroemMethod, tk, (centerTranformedData == 1), data, transformedDataReturnMat, eigvalReturnVector, eigvecReturnMat, newDimension))
return;
}
else
{
KernelPCA<LaplacianKernel, NaiveKernelRule<LaplacianKernel>>(tk, (centerTranformedData == 1))
.Apply(data, transformedDataReturnMat, eigvalReturnVector, eigvecReturnMat, returnDim);
KernelPCA<LaplacianKernel>(tk, (centerTranformedData == 1))
.Apply(data, transformedDataReturnMat, eigvalReturnVector, eigvecReturnMat, newDimension);
}
}
else if (strcmp(kernel, "hyptan") == 0)
......@@ -161,19 +167,35 @@ void kernel_pca(char const *kernel, SP_integer centerTranformedData, SP_integer
HyperbolicTangentKernel htk(scale, offset);
if(useNystroem == 1)
{
useNystroemKernelPCA<HyperbolicTangentKernel>(nystroemMethod, htk, (centerTranformedData == 1), data, transformedDataReturnMat, eigvalReturnVector, eigvecReturnMat, returnDim);
if(!useNystroemKernelPCA<HyperbolicTangentKernel>(nystroemMethod, htk, (centerTranformedData == 1), data, transformedDataReturnMat, eigvalReturnVector, eigvecReturnMat, newDimension))
return;
}
else
{
KernelPCA<HyperbolicTangentKernel, NaiveKernelRule<HyperbolicTangentKernel>>(htk, (centerTranformedData == 1))
.Apply(data, transformedDataReturnMat, eigvalReturnVector, eigvecReturnMat, returnDim);
KernelPCA<HyperbolicTangentKernel>(htk, (centerTranformedData == 1))
.Apply(data, transformedDataReturnMat, eigvalReturnVector, eigvecReturnMat, newDimension);
}
}
else
{
raisePrologDomainExeption(kernel, 1, "The given Kernel is unkown!", "kernel_pca");
return;
}
if (newDimension < eigvecReturnMat.n_rows && newDimension > 0 && !useNystroem)
transformedDataReturnMat.shed_rows(newDimension, transformedDataReturnMat.n_rows - 1);
else
{
transformedDataReturnMat.shed_rows(data.n_rows, transformedDataReturnMat.n_rows - 1);
}
if(isnan(transformedDataReturnMat[0]))
{
raisePrologSystemExeption("Matrix contains nan!");
return;
}
// return the transformedData Matrix
returnMatrixInformation(transformedDataReturnMat, transformedDataMatArr, transformedDataMatColNum, transformedDataMatRowNum);
......@@ -182,6 +204,4 @@ void kernel_pca(char const *kernel, SP_integer centerTranformedData, SP_integer
// return the eigvec Matrix
returnMatrixInformation(eigvecReturnMat, eigvecMatArr, eigvecMatColNum, eigvecMatRowNum);
*newDimension = returnDim;
}
......@@ -43,6 +43,8 @@ kernel_pca(Kernel, CenterTransformedData, UseNystroem, NystroemMethod, Degree, O
EigVecList, EigVecCols,
NewDimension) :-
Bandwidth > 0.0,
NewDimension >= 0,
Bandwidth =< DataRows,
convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)),
kernel_pcaI(Kernel, CenterTransformedData, UseNystroem, NystroemMethod, Degree, Offset, Bandwidth, Scale,
X, Xsize, Xrownum,
......@@ -60,7 +62,7 @@ foreign(kernel_pca, c, kernel_pcaI( +string, +integer, +integer, +string,
-pointer(float_array), -integer, -integer,
-pointer(float_array), -integer,
-pointer(float_array), -integer, -integer,
-integer)).
+integer)).
%% Defines the functions that get connected from main.cpp
......
......@@ -6,8 +6,6 @@
:- use_module(kernel_pca).
:- use_module('../../helper_files/helper.pl').
%% NOTE(review): helper that (re)initializes a model; initModel/4 is not
%% defined in this module and the commit diff suggests this clause was
%% being removed — confirm whether it is still needed.
reset_Model :-
initModel(1,0,50,0.0001).
%%
%% TESTING predicate kernel_pca/16
......@@ -16,26 +14,166 @@ reset_Model :-
%% Failure Tests
%% NOTE(review): testDescription and testDescription2 below look like
%% leftovers from the perceptron test template — reset_Model_No_Train/1
%% and train/8 are not part of the kernel_pca module. Confirm and remove.
test(testDescription, [error(domain_error('expectation' , culprit), _)]) :-
reset_Model_No_Train(perceptron),
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, culprit, 50, 0.0001, _).
test(testDescription2, [error(_,system_error('The values of the Label have to start at 0 and be >= 0 and < the given numClass!'))]) :-
reset_Model_No_Train(perceptron),
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,2], 2, perceptron, 50, 0.0001, _).
%% An unknown kernel atom must raise a domain_error.
test(kernel_pca_Init_Wrong_Kernel, [error(domain_error('The given Kernel is unkown!' , wrongType), _)]) :-
kernel_pca(wrongType, 0, 0, kmeans, 1.0, 0.0, 1.0, 1.0, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, _, _, _, _, _, 0).
%% An unknown Nystroem method (with UseNystroem=1) must raise a domain_error.
test(kernel_pca_Init_Wrong_NystroemMethod, [error(domain_error('The given NystroemMethod is unknown!' , wrongType), _)]) :-
kernel_pca(linear, 0, 1, wrongType, 1.0, 0.0, 1.0, 1.0, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, _, _, _, _, _, 0).
%% A zero bandwidth must be rejected (the predicate fails).
test(kernel_pca_Zero_Bandwidth, fail) :-
kernel_pca(linear, 0, 0, kmeans, 1.0, 0.0, 0.0, 1.0, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, _, _, _, _, _, 0).
%% Successful Tests
%% NOTE(review): testDescription3 looks like a leftover from the
%% perceptron test template — confirm and remove.
test(testDescription3, [true(Error =:= 1)]) :-
reset_Model_No_Train(perceptron),
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, perceptron, 50, 0.0001, Error).
%% Each kernel is exercised with the three Nystroem point-selection
%% methods (random / ordered / kmeans); the transformed data, eigenvalues
%% and eigenvectors of the last run are printed.
test(kernel_pca_Linear_Random) :-
kernel_pca(linear, 0, 0, random, 1.0, 0.0, 1.0, 1.0,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, TransformedList, _, EigValList, EigVecList, _, 0),
print('\nTransformedData: '),
print(TransformedList),
print('\nEigenValues: '),
print(EigValList),
print('\nEigenVectors: '),
print(EigVecList).
test(kernel_pca_Linear_Ordered) :-
kernel_pca(linear, 0, 0, ordered, 1.0, 0.0, 1.0, 1.0,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, TransformedList, _, EigValList, EigVecList, _, 0),
print('\nTransformedData: '),
print(TransformedList),
print('\nEigenValues: '),
print(EigValList),
print('\nEigenVectors: '),
print(EigVecList).
test(kernel_pca_Linear_Kmeans) :-
kernel_pca(linear, 0, 0, kmeans, 1.0, 0.0, 1.0, 1.0,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, TransformedList, _, EigValList, EigVecList, _, 0),
print('\nTransformedData: '),
print(TransformedList),
print('\nEigenValues: '),
print(EigValList),
print('\nEigenVectors: '),
print(EigVecList).
test(kernel_pca_Gaussian) :-
kernel_pca(gaussian, 0, 0, random, 1.0, 0.0, 1.0, 1.0,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, _, _, _, _, _, 0),
kernel_pca(gaussian, 0, 0, ordered, 1.0, 0.0, 1.0, 1.0,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, _, _, _, _, _, 0),
kernel_pca(gaussian, 0, 0, kmeans, 1.0, 0.0, 1.0, 1.0,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, TransformedList, _, EigValList, EigVecList, _, 0),
print('\nTransformedData: '),
print(TransformedList),
print('\nEigenValues: '),
print(EigValList),
print('\nEigenVectors: '),
print(EigVecList).
test(kernel_pca_Polynomial) :-
kernel_pca(polynomial, 0, 0, random, 1.0, 0.0, 1.0, 1.0,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, _, _, _, _, _, 0),
kernel_pca(polynomial, 0, 0, ordered, 1.0, 0.0, 1.0, 1.0,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, _, _, _, _, _, 0),
kernel_pca(polynomial, 0, 0, kmeans, 1.0, 0.0, 1.0, 1.0,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, TransformedList, _, EigValList, EigVecList, _, 0),
print('\nTransformedData: '),
print(TransformedList),
print('\nEigenValues: '),
print(EigValList),
print('\nEigenVectors: '),
print(EigVecList).
%% The hyptan kernel is expected to produce a nan-containing matrix on
%% this data, which the binding reports as a system_error.
test(kernel_pca_Hyptan, [error(_, system_error('Matrix contains nan!'))]) :-
kernel_pca(hyptan, 0, 0, random, 1.0, 0.0, 1.0, 1.0,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, _, _, _, _, _, 0),
kernel_pca(hyptan, 0, 0, ordered, 1.0, 0.0, 1.0, 1.0,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, _, _, _, _, _, 0),
kernel_pca(hyptan, 0, 0, kmeans, 1.0, 0.0, 1.0, 1.0,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, TransformedList, _, EigValList, EigVecList, _, 0),
print('\nTransformedData: '),
print(TransformedList),
print('\nEigenValues: '),
print(EigValList),
print('\nEigenVectors: '),
print(EigVecList).
test(kernel_pca_Laplacian) :-
kernel_pca(laplacian, 0, 0, random, 1.0, 0.0, 1.0, 1.0,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, _, _, _, _, _, 0),
kernel_pca(laplacian, 0, 0, ordered, 1.0, 0.0, 1.0, 1.0,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, _, _, _, _, _, 0),
kernel_pca(laplacian, 0, 0, kmeans, 1.0, 0.0, 1.0, 1.0,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, TransformedList, _, EigValList, EigVecList, _, 0),
print('\nTransformedData: '),
print(TransformedList),
print('\nEigenValues: '),
print(EigValList),
print('\nEigenVectors: '),
print(EigVecList).
test(kernel_pca_Epanechnikov) :-
kernel_pca(epanechnikov, 0, 0, random, 1.0, 0.0, 1.0, 1.0,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, _, _, _, _, _, 0),
kernel_pca(epanechnikov, 0, 0, ordered, 1.0, 0.0, 1.0, 1.0,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, _, _, _, _, _, 0),
kernel_pca(epanechnikov, 0, 0, kmeans, 1.0, 0.0, 1.0, 1.0,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, TransformedList, _, EigValList, EigVecList, _, 0),
print('\nTransformedData: '),
print(TransformedList),
print('\nEigenValues: '),
print(EigValList),
print('\nEigenVectors: '),
print(EigVecList).
test(kernel_pca_Cosine) :-
kernel_pca(cosine, 0, 0, random, 1.0, 0.0, 1.0, 1.0,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, _, _, _, _, _, 0),
kernel_pca(cosine, 0, 0, ordered, 1.0, 0.0, 1.0, 1.0,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, _, _, _, _, _, 0),
kernel_pca(cosine, 0, 0, kmeans, 1.0, 0.0, 1.0, 1.0,
[5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, TransformedList, _, EigValList, EigVecList, _, 0),
print('\nTransformedData: '),
print(TransformedList),
print('\nEigenValues: '),
print(EigValList),
print('\nEigenVectors: '),
print(EigVecList).
%% CSV input: NewDimension 0 keeps the full dimensionality.
test(kernel_pca_CSV_Input) :-
open('/home/afkjakhes/eclipse-workspace/prolog-mlpack-libary/src/data_csv/iris2.csv', read, File),
take_csv_row(File, skipFirstRow,10, Data),
kernel_pca(linear, 0, 0, kmeans, 1.0, 0.0, 1.0, 1.0, Data, 4, TransformedList, _, EigValList, EigVecList, _, 0),
print('\nTransformedData: '),
print(TransformedList),
print('\nEigenValues: '),
print(EigValList),
print('\nEigenVectors: '),
print(EigVecList).
%% CSV input: NewDimension equal to the data dimensionality (4).
test(kernel_pca_CSV_Input_Same_NewDims) :-
open('/home/afkjakhes/eclipse-workspace/prolog-mlpack-libary/src/data_csv/iris2.csv', read, File),
take_csv_row(File, skipFirstRow,10, Data),
kernel_pca(linear, 0, 0, kmeans, 1.0, 0.0, 1.0, 1.0, Data, 4, TransformedList, _, EigValList, EigVecList, _, 4),
print('\nTransformedData: '),
print(TransformedList),
print('\nEigenValues: '),
print(EigValList),
print('\nEigenVectors: '),
print(EigVecList).
%% CSV input: NewDimension (2) smaller than the data dimensionality (4)
%% reduces the transformed data to two dimensions.
%% (Fixed diff-mangled clause: a dangling `test(testDescription4, ...)`
%% head and a leftover `train/8` goal from the perceptron template cut
%% this clause in two and orphaned the kernel_pca/16 goal.)
test(kernel_pca_CSV_Input_Smaller_NewDims) :-
    open('/home/afkjakhes/eclipse-workspace/prolog-mlpack-libary/src/data_csv/iris2.csv', read, File),
    take_csv_row(File, skipFirstRow, 10, Data),
    kernel_pca(linear, 0, 0, kmeans, 1.0, 0.0, 1.0, 1.0, Data, 4, TransformedList, _, EigValList, EigVecList, _, 2),
    print('\nTransformedData: '),
    print(TransformedList),
    print('\nEigenValues: '),
    print(EigValList),
    print('\nEigenVectors: '),
    print(EigVecList).
:- end_tests(kernel_pca).
......
......@@ -153,7 +153,7 @@ double computeError(float *dataMatArr, SP_integer dataMatSize, SP_integer dataMa
if(dataMatSize / dataMatRowNum != responsesArrSize)
{
cout << "Target dim doesnt fit to the Data dim" << endl;
return;
return 0.0;
}
// convert the Prolog array to arma::mat
mat data = convertArrayToMat(dataMatArr, dataMatSize, dataMatRowNum);
......@@ -221,7 +221,7 @@ double train(float *dataMatArr, SP_integer dataMatSize, SP_integer dataMatRowNum
if(dataMatSize / dataMatRowNum != responsesArrSize)
{
cout << "Target dim doesnt fit to the Data dim" << endl;
return;
return 0.0;
}
// convert the Prolog array to arma::mat
mat data = convertArrayToMat(dataMatArr, dataMatSize, dataMatRowNum);
......
:- use_module(library(plunit)).
%% Just compile this file and call run_tests. to run every tests
:- use_module('src/methods/adaboost/adaboost_test.pl').
:- use_module('src/methods/approx_kfn/approx_kfn_test.pl').
%%:- use_module('src/methods/approx_kfn/approx_kfn_test.pl').
:- use_module('src/methods/bayesian_linear_regression/bayesian_linear_regression_test.pl').
%%:- use_module('src/methods/bayesian_linear_regression/bayesian_linear_regression_test.pl').
:- use_module('src/methods/dbscan/dbscan_test.pl').
......@@ -13,83 +19,53 @@
:- use_module('src/methods/fastmks/fastmks_test.pl').
:- use_module('src/methods/hoeffding_tree/hoeffding_tree_test.pl').
%%:- use_module('src/methods/hoeffding_tree/hoeffding_tree_test.pl').
:- use_module('src/methods/kde/kde_test.pl').
:- use_module('src/methods/kernel_pca/kernel_pca_test.pl').
:- use_module('src/methods/kfn/kfn_test.pl').
%%:- use_module('src/methods/kfn/kfn_test.pl').
:- use_module('src/methods/kmeans/kmeans_test.pl').
%%:- use_module('src/methods/kmeans/kmeans_test.pl').
:- use_module('src/methods/knn/knn_test.pl').
%%:- use_module('src/methods/knn/knn_test.pl').
:- use_module('src/methods/lars/lars_test.pl').
%%:- use_module('src/methods/lars/lars_test.pl').
:- use_module('src/methods/linear_regression/linear_regression_test.pl').
%%:- use_module('src/methods/linear_regression/linear_regression_test.pl').
:- use_module('src/methods/linear_SVM/linear_SVM_test.pl').
%%:- use_module('src/methods/linear_SVM/linear_SVM_test.pl').
:- use_module('src/methods/lmnn/lmnn_test.pl').
%%:- use_module('src/methods/lmnn/lmnn_test.pl').
:- use_module('src/methods/local_coordinate_coding/local_coordinate_coding_test.pl').
%%:- use_module('src/methods/local_coordinate_coding/local_coordinate_coding_test.pl').
:- use_module('src/methods/logistic_regression/logistic_regression_test.pl').
%%:- use_module('src/methods/logistic_regression/logistic_regression_test.pl').
:- use_module('src/methods/lsh/lsh_test.pl').
%%:- use_module('src/methods/lsh/lsh_test.pl').
:- use_module('src/methods/mean_shift/mean_shift_test.pl').
%%:- use_module('src/methods/mean_shift/mean_shift_test.pl').
:- use_module('src/methods/naive_bayes_classifier/naive_bayes_classifier_test.pl').
%%:- use_module('src/methods/naive_bayes_classifier/naive_bayes_classifier_test.pl').
:- use_module('src/methods/nca/nca_test.pl').
%%:- use_module('src/methods/nca/nca_test.pl').
:- use_module('src/methods/nmf/nmf_test.pl').
%%:- use_module('src/methods/nmf/nmf_test.pl').
:- use_module('src/methods/pca/pca_test.pl').
%%:- use_module('src/methods/pca/pca_test.pl').
:- use_module('src/methods/perceptron/perceptron_test.pl').
%%:- use_module('src/methods/perceptron/perceptron_test.pl').
:- use_module('src/methods/radical/radical_test.pl').
%%:- use_module('src/methods/radical/radical_test.pl').
:- use_module('src/methods/random_forest/random_forest_test.pl').
%%:- use_module('src/methods/random_forest/random_forest_test.pl').
:- use_module('src/methods/softmax_regression/softmax_regression_test.pl').
%%:- use_module('src/methods/softmax_regression/softmax_regression_test.pl').
:- use_module('src/methods/sparse_coding/sparse_coding_test.pl').
%% It is better to run the sparse_coding tests alone, because the C++ method writes out a lot of debug messages that make the test output hard to read.
%%:- use_module('src/methods/sparse_coding/sparse_coding_test.pl').
:- use_module('src/helper_files/helper_tests.pl').
%% run/0
%%
%% Top-level entry point: executes every plunit test suite of the library
%% in sequence. Each run_*_tests goal is expected to be exported by the
%% corresponding *_test.pl module loaded via use_module above.
%% NOTE(review): several of those modules are commented out in the
%% use_module section of this file; calling their run_*_tests goals will
%% raise an existence error — confirm this list matches the modules that
%% are actually loaded.
run :-
run_adaboost_tests,
run_approx_kfn_tests,
run_bayesian_linear_regression_tests,
run_dbscan_tests,
run_decision_tree_tests,
run_emst_tests,
run_fastmks_tests,
run_hoeffding_tree_tests,
run_kde_tests,
run_kernel_pca_tests,
run_kfn_tests,
run_kmeans_tests,
run_knn_tests,
run_lars_tests,
run_linear_regression_tests,
run_linear_SVM_tests,
run_lmnn_tests,
run_local_coordinate_coding_tests,
run_logistic_regression_tests,
run_lsh_tests,
run_mean_shift_tests,
run_naive_bayes_classifier_tests,
run_nca_tests,
run_nmf_tests,
run_pca_tests,
run_perceptron_tests,
run_radical_tests,
run_random_forest_tests,
run_softmax_regression_tests,
run_sparse_coding_tests,
run_helper_tests.
\ No newline at end of file
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment