Skip to content
Snippets Groups Projects
Commit 194e8e1c authored by Jakhes's avatar Jakhes
Browse files

Changing Naming schema for the predicates

parent f1ad5224
Branches
No related tags found
No related merge requests found
Showing
with 686 additions and 685 deletions
:- module(adaboost, [ initModelWithTraining/7, :- module(adaboost, [ adaboost_initModelWithTraining/7,
initModelNoTraining/2, adaboost_initModelNoTraining/2,
classify/5, adaboost_classify/5,
numClasses/1, adaboost_numClasses/1,
getTolerance/1, adaboost_getTolerance/1,
modifyTolerance/1, adaboost_modifyTolerance/1,
train/8]). adaboost_train/8]).
%% requirements of library(struct) %% requirements of library(struct)
:- load_files(library(str_decl), :- load_files(library(str_decl),
...@@ -38,7 +38,7 @@ ...@@ -38,7 +38,7 @@
%% %%
%% Initiates the Adaboostmodel and trains it, so classify can be used immediately. %% Initiates the Adaboostmodel and trains it, so classify can be used immediately.
%% %%
initModelWithTraining(MatList, MatRows, VecList, NumClasses, Learner, Iterations, Tolerance) :- adaboost_initModelWithTraining(MatList, MatRows, VecList, NumClasses, Learner, Iterations, Tolerance) :-
NumClasses >= 0, NumClasses >= 0,
Iterations >= 0, Iterations >= 0,
Tolerance > 0, Tolerance > 0,
...@@ -64,7 +64,7 @@ foreign(initModelWithTraining, c, initModelWithTrainingI(+pointer(float_array), ...@@ -64,7 +64,7 @@ foreign(initModelWithTraining, c, initModelWithTrainingI(+pointer(float_array),
%% Needs to be called first before all other predicates exept initModelWithTraining! %% Needs to be called first before all other predicates exept initModelWithTraining!
%% Initiates the Adaboostmodel but doesnt train it, so train has to be used first before classify can be used. %% Initiates the Adaboostmodel but doesnt train it, so train has to be used first before classify can be used.
%% %%
initModelNoTraining(Tolerance, Learner) :- adaboost_initModelNoTraining(Tolerance, Learner) :-
Tolerance > 0, Tolerance > 0,
initModelNoTrainingI(Tolerance, Learner). initModelNoTrainingI(Tolerance, Learner).
...@@ -81,7 +81,7 @@ foreign(initModelNoTraining, c, initModelNoTrainingI(+float32, +string)). ...@@ -81,7 +81,7 @@ foreign(initModelNoTraining, c, initModelNoTrainingI(+float32, +string)).
%% --Description-- %% --Description--
%% Classifies the given data into the number of classes the model was trained for. %% Classifies the given data into the number of classes the model was trained for.
%% %%
classify(TestList, TestRows, PredicList, ProbsList, ZRows) :- adaboost_classify(TestList, TestRows, PredicList, ProbsList, ZRows) :-
convert_list_to_float_array(TestList, TestRows, array(Xsize, Xrownum, X)), convert_list_to_float_array(TestList, TestRows, array(Xsize, Xrownum, X)),
classifyI(X, Xsize, Xrownum, Y, Ysize, Z, ZCols, ZRows), classifyI(X, Xsize, Xrownum, Y, Ysize, Z, ZCols, ZRows),
convert_float_array_to_list(Y, Ysize, PredicList), convert_float_array_to_list(Y, Ysize, PredicList),
...@@ -102,7 +102,7 @@ foreign(classify, c, classifyI( +pointer(float_array), +integer, +integer, ...@@ -102,7 +102,7 @@ foreign(classify, c, classifyI( +pointer(float_array), +integer, +integer,
%% --Description-- %% --Description--
%% Returns the amount of classes defined in the model for classification. %% Returns the amount of classes defined in the model for classification.
%% %%
numClasses(ClassesNum) :- adaboost_numClasses(ClassesNum) :-
numClassesI(ClassesNum). numClassesI(ClassesNum).
foreign(numClasses, c, numClassesI([-integer])). foreign(numClasses, c, numClassesI([-integer])).
...@@ -116,7 +116,7 @@ foreign(numClasses, c, numClassesI([-integer])). ...@@ -116,7 +116,7 @@ foreign(numClasses, c, numClassesI([-integer])).
%% --Description-- %% --Description--
%% Returns the tolerance of the model. %% Returns the tolerance of the model.
%% %%
getTolerance(Tolerance) :- adaboost_getTolerance(Tolerance) :-
getToleranceI(Tolerance). getToleranceI(Tolerance).
foreign(getTolerance, c, getToleranceI([-float32])). foreign(getTolerance, c, getToleranceI([-float32])).
...@@ -130,7 +130,7 @@ foreign(getTolerance, c, getToleranceI([-float32])). ...@@ -130,7 +130,7 @@ foreign(getTolerance, c, getToleranceI([-float32])).
%% --Description-- %% --Description--
%% Modifies the tolerance of the model. %% Modifies the tolerance of the model.
%% %%
modifyTolerance(NewTolerance) :- adaboost_modifyTolerance(NewTolerance) :-
NewTolerance > 0, NewTolerance > 0,
modifyToleranceI(NewTolerance). modifyToleranceI(NewTolerance).
...@@ -149,7 +149,7 @@ foreign(modifyTolerance, c, modifyToleranceI(+float32)). ...@@ -149,7 +149,7 @@ foreign(modifyTolerance, c, modifyToleranceI(+float32)).
%% float double upper bound training error %% float double upper bound training error
%% %%
%% --Description-- %% --Description--
train(MatList, MatRows, VecList, NumClasses, Learner, Iterations, Tolerance, Error) :- adaboost_train(MatList, MatRows, VecList, NumClasses, Learner, Iterations, Tolerance, Error) :-
NumClasses >= 0, NumClasses >= 0,
Iterations >= 0, Iterations >= 0,
Tolerance >= 0, Tolerance >= 0,
......
...@@ -11,308 +11,309 @@ ...@@ -11,308 +11,309 @@
reset_Model_No_Train(Learner) :- reset_Model_No_Train(Learner) :-
initModelNoTraining(0.0001, Learner). adaboost_initModelNoTraining(0.0001, Learner).
reset_Model_With_Train(Learner) :- reset_Model_With_Train(Learner) :-
initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], 2, Learner, 50, 0.0001). adaboost_initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], 2, Learner, 50, 0.0001).
%% %%
%% TESTING predicate initModelWithTraining/7 %% TESTING predicate adaboost_initModelWithTraining/7
%% %%
:- begin_tests(initModelWithTraining). :- begin_tests(adaboost_initModelWithTraining).
%% Failure Tests %% Failure Tests
test(initModelWithTraining_WrongInputTypes, fail) :- test(initModelWithTraining_WrongInputTypes, fail) :-
initModelWithTraining(wrong, 3, [0.2,0.2,0.2,0.2], 2, perceptron, 50, 0.0001). adaboost_initModelWithTraining(wrong, 3, [0.2,0.2,0.2,0.2], 2, perceptron, 50, 0.0001).
test(initModelWithTraining_WrongTol, fail) :- test(initModelWithTraining_WrongTol, fail) :-
initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], -2, perceptron, 50, 0.0001). adaboost_initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], -2, perceptron, 50, 0.0001).
test(initModelWithTraining_WrongLearner, [error(domain_error('perceptron or decision_stump' ,wrongLearner), _)]) :- test(initModelWithTraining_WrongLearner, [error(domain_error('perceptron or decision_stump' ,wrongLearner), _)]) :-
initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], 2, wrongLearner, 50, 0.0001). adaboost_initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], 2, wrongLearner, 50, 0.0001).
test(initModelWithTraining_WrongIterations, fail) :- test(initModelWithTraining_WrongIterations, fail) :-
initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], 2, perceptron, -50, 0.0001). adaboost_initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], 2, perceptron, -50, 0.0001).
test(initModelWithTraining_WrongTol, fail) :- test(initModelWithTraining_WrongTol, fail) :-
initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], 2, perceptron, 50, -10.0). adaboost_initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], 2, perceptron, 50, -10.0).
test(initModelWithTraining_MissmatchingLabels) :- test(initModelWithTraining_MissmatchingLabels) :-
initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0, 1 ,1], 2, perceptron, 50, 0.0001). adaboost_initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0, 1 ,1], 2, perceptron, 50, 0.0001).
%% Successful Tests %% Successful Tests
test(initModelWithTraining_Perceptron) :- test(initModelWithTraining_Perceptron) :-
initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], 2, perceptron, 50, 0.0001). adaboost_initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], 2, perceptron, 50, 0.0001).
test(initModelWithTraining_DecisionStump) :- test(initModelWithTraining_DecisionStump) :-
initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3], 3, [0, 0, 1], 2, decision_stump, 50, 0.0001). adaboost_initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3], 3, [0, 0, 1], 2, decision_stump, 50, 0.0001).
:- end_tests(adaboost_initModelWithTraining).
:- end_tests(initModelWithTraining).
%% %%
%% TESTING predicate initModelNoTraining/2 %% TESTING predicate adaboost_initModelNoTraining/2
%% %%
:- begin_tests(initModelNoTrain). :- begin_tests(adaboost_initModelNoTraining).
%% Failure Tests %% Failure Tests
test(initModelNoTraining_WrongLearner, [error(domain_error('perceptron or decision_stump' ,wrongLearner), _)]) :- test(initModelNoTraining_WrongLearner, [error(domain_error('perceptron or decision_stump' ,wrongLearner), _)]) :-
initModelNoTraining(0.0001, wrongLearner). adaboost_initModelNoTraining(0.0001, wrongLearner).
test(initModelNoTraining_WrongTol, fail) :- test(initModelNoTraining_WrongTol, fail) :-
initModelNoTraining(-1.0, perceptron). adaboost_initModelNoTraining(-1.0, perceptron).
%% Successful Tests %% Successful Tests
test(initModelNoTraining_Perceptron) :- test(initModelNoTraining_Perceptron) :-
initModelNoTraining(0.001, perceptron). adaboost_initModelNoTraining(0.001, perceptron).
test(initModelNoTraining_DecisionStump) :- test(initModelNoTraining_DecisionStump) :-
initModelNoTraining(0.000014, decision_stump). adaboost_initModelNoTraining(0.000014, decision_stump).
:- end_tests(initModelNoTrain). :- end_tests(adaboost_initModelNoTraining).
%% %%
%% TESTING predicate classify/8 %% TESTING predicate adaboost_classify/8
%% %%
:- begin_tests(classify). :- begin_tests(adaboost_classify).
%% Failure Tests %% Failure Tests
test(classify_on_untrained_model, [error(_,system_error('The model is not trained!'))]) :- test(classify_on_untrained_model, [error(_,system_error('The model is not trained!'))]) :-
reset_Model_No_Train(perceptron), reset_Model_No_Train(perceptron),
classify([3, 2, 0, 5, 1, 4, 0, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, _, _, _). adaboost_classify([3, 2, 0, 5, 1, 4, 0, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, _, _, _).
test(classify_with_bad_data_input_perceptron, [error(_,system_error('The given data matrix has incorrect dimensions compared to the training data!'))]) :- test(classify_with_bad_data_input_perceptron, [error(_,system_error('The given data matrix has incorrect dimensions compared to the training data!'))]) :-
reset_Model_With_Train(perceptron), reset_Model_With_Train(perceptron),
classify([3, 2, 0, 5, 1, 4, 0, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 2, _, _, _). adaboost_classify([3, 2, 0, 5, 1, 4, 0, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 2, _, _, _).
%% should cause an exeption but doesnt TODO: %% should cause an exeption but doesnt TODO:
test(classify_with_bad_data_input_decision_stump) :- test(classify_with_bad_data_input_decision_stump) :-
reset_Model_With_Train(decision_stump), reset_Model_With_Train(decision_stump),
classify([3, 2, 0, 5, 1, 4, 0, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 5, _, _, _). adaboost_classify([3, 2, 0, 5, 1, 4, 0, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 5, _, _, _).
%% Successful Tests %% Successful Tests
test(classify_perceptron) :- test(classify_perceptron) :-
reset_Model_No_Train(perceptron), reset_Model_No_Train(perceptron),
take_rows_from_iris_CSV(10, Records), take_rows_from_iris_CSV(10, Records),
train(Records, 4, [0,1,0,1,1,0,1,1,1,0], 2, perceptron, 50, 0.0001, _), adaboost_train(Records, 4, [0,1,0,1,1,0,1,1,1,0], 2, perceptron, 50, 0.0001, _),
classify([3, 2, 0, 5, 1, 4, 0, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, adaboost_classify([3, 2, 0, 5, 1, 4, 0, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4,
[1.0,1.0,1.0,1.0,0.0], [1.0,1.0,1.0,1.0,0.0],
[[0.0,1.0],[0.0,1.0],[0.0,1.0],[0.0,1.0],[1.0,0.0]], 2). [[0.0,1.0],[0.0,1.0],[0.0,1.0],[0.0,1.0],[1.0,0.0]], 2).
test(classify_decision_stump) :- test(classify_decision_stump) :-
reset_Model_No_Train(decision_stump), reset_Model_No_Train(decision_stump),
take_rows_from_iris_CSV(10, Records), take_rows_from_iris_CSV(10, Records),
train(Records, 4, [0,1,0,1,1,0,1,1,1,0], 2, decision_stump, 50, 0.0001, _), adaboost_train(Records, 4, [0,1,0,1,1,0,1,1,1,0], 2, decision_stump, 50, 0.0001, _),
classify([3, 2, 0, 5, 1, 4, 0, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, adaboost_classify([3, 2, 0, 5, 1, 4, 0, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4,
[1.0,1.0,1.0,1.0,1.0], [1.0,1.0,1.0,1.0,1.0],
[[0.0,1.0],[0.0,1.0],[0.0,1.0],[0.0,1.0],[0.0,1.0]], 2). [[0.0,1.0],[0.0,1.0],[0.0,1.0],[0.0,1.0],[0.0,1.0]], 2).
:- end_tests(classify). :- end_tests(adaboost_classify).
%% %%
%% TESTING predicate numClasses/1 %% TESTING predicate adaboost_numClasses/1
%% %%
:- begin_tests(numClasses). :- begin_tests(adaboost_numClasses).
test(numClasses_Perceptron_NoTrain, [true(Amount =:= 0)]) :- test(numClasses_Perceptron_NoTrain, [true(Amount =:= 0)]) :-
reset_Model_No_Train(perceptron), reset_Model_No_Train(perceptron),
numClasses(Amount). adaboost_numClasses(Amount).
test(numClasses_Decision_Stump_NoTrain, [true(Amount =:= 0)]) :- test(numClasses_Decision_Stump_NoTrain, [true(Amount =:= 0)]) :-
reset_Model_No_Train(decision_stump), reset_Model_No_Train(decision_stump),
numClasses(Amount). adaboost_numClasses(Amount).
test(numClasses_Perceptron_WithTrain, [true(Amount =:= 2)]) :- test(numClasses_Perceptron_WithTrain, [true(Amount =:= 2)]) :-
reset_Model_With_Train(perceptron), reset_Model_With_Train(perceptron),
numClasses(Amount). adaboost_numClasses(Amount).
test(numClasses_Decision_Stump_WithTrain, [true(Amount =:= 2)]) :- test(numClasses_Decision_Stump_WithTrain, [true(Amount =:= 2)]) :-
reset_Model_With_Train(decision_stump), reset_Model_With_Train(decision_stump),
numClasses(Amount). adaboost_numClasses(Amount).
test(numClasses_Custom_NumClasses, [true(Amount =:= 3)]) :- test(numClasses_Custom_NumClasses, [true(Amount =:= 3)]) :-
initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], 3, perceptron, 50, 0.0001), adaboost_initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], 3, perceptron, 50, 0.0001),
numClasses(Amount). adaboost_numClasses(Amount).
test(numClasses_afterTrain_Perceptron, [true(Amount =:= 2)]) :- test(numClasses_afterTrain_Perceptron, [true(Amount =:= 2)]) :-
reset_Model_No_Train(perceptron), reset_Model_No_Train(perceptron),
take_rows_from_iris_CSV(10, Records), take_rows_from_iris_CSV(10, Records),
train(Records, 4, [0,1,0,1,1,0,1,1,1,0], 2, perceptron, 50, 0.0001, _), adaboost_train(Records, 4, [0,1,0,1,1,0,1,1,1,0], 2, perceptron, 50, 0.0001, _),
numClasses(Amount). adaboost_numClasses(Amount).
:- end_tests(numClasses). :- end_tests(adaboost_numClasses).
%% %%
%% TESTING predicate getTolerance/1 %% TESTING predicate adaboost_getTolerance/1
%% %%
:- begin_tests(getTolerance). :- begin_tests(adaboost_getTolerance).
test(getTolerance_Perceptron_NoTrain, [true(Amount =:= 0.0001)]) :- test(getTolerance_Perceptron_NoTrain, [true(Amount =:= 0.0001)]) :-
reset_Model_No_Train(perceptron), reset_Model_No_Train(perceptron),
getTolerance(Amount). adaboost_getTolerance(Amount).
test(getTolerance_Decision_Stump_NoTrain, [true(Amount =:= 0.0001)]) :- test(getTolerance_Decision_Stump_NoTrain, [true(Amount =:= 0.0001)]) :-
reset_Model_No_Train(decision_stump), reset_Model_No_Train(decision_stump),
getTolerance(Amount). adaboost_getTolerance(Amount).
test(getTolerance_Perceptron_WithTrain, [true(Amount =:= 0.0001)]) :- test(getTolerance_Perceptron_WithTrain, [true(Amount =:= 0.0001)]) :-
reset_Model_With_Train(perceptron), reset_Model_With_Train(perceptron),
getTolerance(Amount). adaboost_getTolerance(Amount).
test(getTolerance_Decision_Stump_WithTrain, [true(Amount =:= 0.0001)]) :- test(getTolerance_Decision_Stump_WithTrain, [true(Amount =:= 0.0001)]) :-
reset_Model_With_Train(decision_stump), reset_Model_With_Train(decision_stump),
getTolerance(Amount). adaboost_getTolerance(Amount).
test(getTolerance_Custom_Tolerance, [true(Amount =:= 0.0009)]) :- test(getTolerance_Custom_Tolerance, [true(Amount =:= 0.0009)]) :-
initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], 3, perceptron, 50, 0.0009), adaboost_initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], 3, perceptron, 50, 0.0009),
getTolerance(Amount). adaboost_getTolerance(Amount).
test(getTolerance_afterTrain, [true(Amount =:= 0.0005)]) :- test(getTolerance_afterTrain, [true(Amount =:= 0.0005)]) :-
reset_Model_No_Train(perceptron), reset_Model_No_Train(perceptron),
take_rows_from_iris_CSV(10, Records), take_rows_from_iris_CSV(10, Records),
train(Records, 4, [0,1,0,1,1,0,1,1,1,0], 2, perceptron, 50, 0.0005, _), adaboost_train(Records, 4, [0,1,0,1,1,0,1,1,1,0], 2, perceptron, 50, 0.0005, _),
getTolerance(Amount). adaboost_getTolerance(Amount).
:- end_tests(getTolerance). :- end_tests(adaboost_getTolerance).
%% %%
%% TESTING predicate modifyTolerance/1 %% TESTING predicate adaboost_modifyTolerance/1
%% %%
:- begin_tests(modifyTolerance). :- begin_tests(adaboost_modifyTolerance).
%% Failure Tests %% Failure Tests
test(modifyTolerance_With_Negative_Input, fail) :- test(modifyTolerance_With_Negative_Input, fail) :-
reset_Model_No_Train(perceptron), reset_Model_No_Train(perceptron),
modifyTolerance(-0.02). adaboost_modifyTolerance(-0.02).
%% Successful Tests %% Successful Tests
test(modifyTolerance_Perceptron_NoTrain, [true(Amount =:= 0.02)]) :- test(modifyTolerance_Perceptron_NoTrain, [true(Amount =:= 0.02)]) :-
reset_Model_No_Train(perceptron), reset_Model_No_Train(perceptron),
modifyTolerance(0.02), adaboost_modifyTolerance(0.02),
getTolerance(Amount). adaboost_getTolerance(Amount).
test(modifyTolerance_Decision_Stump_NoTrain, [true(Amount =:= 0.02)]) :- test(modifyTolerance_Decision_Stump_NoTrain, [true(Amount =:= 0.02)]) :-
reset_Model_No_Train(decision_stump), reset_Model_No_Train(decision_stump),
modifyTolerance(0.02), adaboost_modifyTolerance(0.02),
getTolerance(Amount). adaboost_getTolerance(Amount).
test(modifyTolerance_Perceptron_WithTrain, [true(Amount =:= 0.02)]) :- test(modifyTolerance_Perceptron_WithTrain, [true(Amount =:= 0.02)]) :-
reset_Model_With_Train(perceptron), reset_Model_With_Train(perceptron),
modifyTolerance(0.02), adaboost_modifyTolerance(0.02),
getTolerance(Amount). adaboost_getTolerance(Amount).
test(modifyTolerance_Decision_Stump_WithTrain, [true(Amount =:= 0.02)]) :- test(modifyTolerance_Decision_Stump_WithTrain, [true(Amount =:= 0.02)]) :-
reset_Model_With_Train(decision_stump), reset_Model_With_Train(decision_stump),
modifyTolerance(0.02), adaboost_modifyTolerance(0.02),
getTolerance(Amount). adaboost_getTolerance(Amount).
test(modifyTolerance_Custom_Tolerance, [true(Amount =:= 0.02)]) :- test(modifyTolerance_Custom_Tolerance, [true(Amount =:= 0.02)]) :-
initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], 3, perceptron, 50, 0.0001), adaboost_initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], 3, perceptron, 50, 0.0001),
modifyTolerance(0.02), adaboost_modifyTolerance(0.02),
getTolerance(Amount). adaboost_getTolerance(Amount).
test(modifyTolerance_afterTrain, [true(Amount =:= 0.02)]) :- test(modifyTolerance_afterTrain, [true(Amount =:= 0.02)]) :-
reset_Model_No_Train(perceptron), reset_Model_No_Train(perceptron),
take_rows_from_iris_CSV(10, Records), take_rows_from_iris_CSV(10, Records),
train(Records, 4, [0,1,0,1,1,0,1,1,1,0], 2, perceptron, 50, 0.0001, _), adaboost_train(Records, 4, [0,1,0,1,1,0,1,1,1,0], 2, perceptron, 50, 0.0001, _),
modifyTolerance(0.02), adaboost_modifyTolerance(0.02),
getTolerance(Amount). adaboost_getTolerance(Amount).
:- end_tests(modifyTolerance). :- end_tests(adaboost_modifyTolerance).
%% %%
%% TESTING predicate train/8 %% TESTING predicate adaboost_train/8
%% %%
:- begin_tests(train). :- begin_tests(adaboost_train).
%% Failure Tests %% Failure Tests
test(train_With_Bad_NumClass_Input, fail) :- test(train_With_Bad_NumClass_Input, fail) :-
reset_Model_No_Train(perceptron), reset_Model_No_Train(perceptron),
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], -2, perceptron, 50, -0.0001, _). adaboost_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], -2, perceptron, 50, -0.0001, _).
test(train_With_Bad_Learner_Input, [error(domain_error('perceptron or decision_stump' ,wrongLearner), _)]) :- test(train_With_Bad_Learner_Input, [error(domain_error('perceptron or decision_stump' ,wrongLearner), _)]) :-
reset_Model_No_Train(perceptron), reset_Model_No_Train(perceptron),
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, wrongLearner, 50, 0.0001, _). adaboost_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, wrongLearner, 50, 0.0001, _).
test(train_With_Bad_Iterations_Input, fail) :- test(train_With_Bad_Iterations_Input, fail) :-
reset_Model_No_Train(perceptron), reset_Model_No_Train(perceptron),
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, perceptron, -50, 0.0001, _). adaboost_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, perceptron, -50, 0.0001, _).
test(train_With_Bad_Tol_Input, fail) :- test(train_With_Bad_Tol_Input, fail) :-
reset_Model_No_Train(perceptron), reset_Model_No_Train(perceptron),
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, perceptron, 50, -0.0001, _). adaboost_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, perceptron, 50, -0.0001, _).
test(train_With_Bad_Labels_Too_Many_Classes, [error(_,system_error('The values of the Label have to start at 0 and be >= 0 and < the given numClass!'))]) :- test(train_With_Bad_Labels_Too_Many_Classes, [error(_,system_error('The values of the Label have to start at 0 and be >= 0 and < the given numClass!'))]) :-
reset_Model_No_Train(perceptron), reset_Model_No_Train(perceptron),
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,2], 2, perceptron, 50, 0.0001, _). adaboost_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,2], 2, perceptron, 50, 0.0001, _).
test(train_With_Bad_Labels_Negative_Perceptron, [error(_,system_error('The values of the Label have to start at 0 and be >= 0 and < the given numClass!'))]) :- test(train_With_Bad_Labels_Negative_Perceptron, [error(_,system_error('The values of the Label have to start at 0 and be >= 0 and < the given numClass!'))]) :-
reset_Model_No_Train(perceptron), reset_Model_No_Train(perceptron),
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,-1,0,-1], 2, perceptron, 50, 0.0001, _). adaboost_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,-1,0,-1], 2, perceptron, 50, 0.0001, _).
%% should cause an exeption but doesnt TODO: %% should cause an exeption but doesnt TODO:
test(train_With_Bad_Labels_Negative_Decision_Stump) :- test(train_With_Bad_Labels_Negative_Decision_Stump) :-
reset_Model_No_Train(decision_stump), reset_Model_No_Train(decision_stump),
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,-1,0,-1], 2, decision_stump, 50, 0.0001, _). adaboost_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,-1,0,-1], 2, decision_stump, 50, 0.0001, _).
%% seems to be allowed %% seems to be allowed
test(train_With_Too_Many_Labels) :- test(train_With_Too_Many_Labels) :-
reset_Model_No_Train(perceptron), reset_Model_No_Train(perceptron),
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,0,0,1], 2, perceptron, 50, 0.0001, _). adaboost_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,0,0,1], 2, perceptron, 50, 0.0001, _).
test(train_With_Too_Little_Labels, [error(_,system_error('The given Labels Vector is too short!'))]) :- test(train_With_Too_Little_Labels, [error(_,system_error('The given Labels Vector is too short!'))]) :-
reset_Model_No_Train(decision_stump), reset_Model_No_Train(decision_stump),
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], 2, decision_stump, 50, 0.0001, _). adaboost_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], 2, decision_stump, 50, 0.0001, _).
test(train_With_Negative_RowNumber, fail) :- test(train_With_Negative_RowNumber, fail) :-
reset_Model_No_Train(decision_stump), reset_Model_No_Train(decision_stump),
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], -3, [0,0,0,0], 2, decision_stump, 50, 0.0001, _). adaboost_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], -3, [0,0,0,0], 2, decision_stump, 50, 0.0001, _).
%% Successful Tests %% Successful Tests
test(train_With_Direct_Input_Perceptron, [true(Error =:= 1)]) :- test(train_With_Direct_Input_Perceptron, [true(Error =:= 1)]) :-
reset_Model_No_Train(perceptron), reset_Model_No_Train(perceptron),
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, perceptron, 50, 0.0001, Error). adaboost_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, perceptron, 50, 0.0001, Error).
test(train_With_Data_From_CSV_Perceptron, [true(Error =:= 0.9797958971132711)]) :- test(train_With_Data_From_CSV_Perceptron, [true(Error =:= 0.9797958971132711)]) :-
reset_Model_No_Train(perceptron), reset_Model_No_Train(perceptron),
take_rows_from_iris_CSV(10, Records), take_rows_from_iris_CSV(10, Records),
train(Records, 4, [0,1,0,1,1,0,1,1,1,0], 2, perceptron, 50, 0.0001, Error). adaboost_train(Records, 4, [0,1,0,1,1,0,1,1,1,0], 2, perceptron, 50, 0.0001, Error).
test(train_With_Direct_Input_Decision_Stump, [true(Error =:= 1)]) :- test(train_With_Direct_Input_Decision_Stump, [true(Error =:= 1)]) :-
reset_Model_No_Train(decision_stump), reset_Model_No_Train(decision_stump),
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, decision_stump, 50, 0.0001, Error). adaboost_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, decision_stump, 50, 0.0001, Error).
test(train_With_Data_From_CSV_Decision_Stump, [true(Error =:= 0.9797958971132711)]) :- test(train_With_Data_From_CSV_Decision_Stump, [true(Error =:= 0.9797958971132711)]) :-
reset_Model_No_Train(decision_stump), reset_Model_No_Train(decision_stump),
take_rows_from_iris_CSV(10, Records), take_rows_from_iris_CSV(10, Records),
train(Records, 4, [0,1,0,1,1,0,1,1,1,0], 2, decision_stump, 50, 0.0001, Error). adaboost_train(Records, 4, [0,1,0,1,1,0,1,1,1,0], 2, decision_stump, 50, 0.0001, Error).
test(train_After_InitTrain_Perceptron, [true(Error =:= 1)]) :- test(train_After_InitTrain_Perceptron, [true(Error =:= 1)]) :-
reset_Model_With_Train(perceptron), reset_Model_With_Train(perceptron),
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 3, perceptron, 100, 0.01, Error). adaboost_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 3, perceptron, 100, 0.01, Error).
test(train_After_InitTrain_Decision_Stump, [true(Error =:= 1)]) :- test(train_After_InitTrain_Decision_Stump, [true(Error =:= 1)]) :-
reset_Model_With_Train(decision_stump), reset_Model_With_Train(decision_stump),
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 3, decision_stump, 100, 0.01, Error). adaboost_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 3, decision_stump, 100, 0.01, Error).
:- end_tests(train). :- end_tests(adaboost_train).
run_adaboost_tests :- run_adaboost_tests :-
run_tests. run_tests.
......
:- module(bayesian_linear_regression, [ initModel/4, :- module(bayesian_linear_regression, [ blr_initModel/4,
alpha/1, blr_alpha/1,
beta/1, blr_beta/1,
dataOffset/1, blr_dataOffset/1,
dataScale/1, blr_dataScale/1,
omega/1, blr_omega/1,
predict/3, blr_predict/3,
predictWithStd/4, blr_predictWithStd/4,
rmse/4, blr_rmse/4,
train/3, blr_train/3,
variance/1]). blr_variance/1]).
:- load_files(library(str_decl), :- load_files(library(str_decl),
[when(compile_time), if(changed)]). [when(compile_time), if(changed)]).
...@@ -39,7 +39,7 @@ ...@@ -39,7 +39,7 @@
%% Initiates the Model so now train/5 can be called. %% Initiates the Model so now train/5 can be called.
%% Before predict/5 or predictWitStd/7 can be used train/5 has to be called before %% Before predict/5 or predictWitStd/7 can be used train/5 has to be called before
%% %%
initModel(CenterData, ScaleData, NIterMax, Tol) :- blr_initModel(CenterData, ScaleData, NIterMax, Tol) :-
NIterMax >= 0, NIterMax >= 0,
Tol > 0, Tol > 0,
initModelI(CenterData, ScaleData, NIterMax, Tol). initModelI(CenterData, ScaleData, NIterMax, Tol).
...@@ -58,7 +58,7 @@ foreign(initModel, c, initModelI( +integer, ...@@ -58,7 +58,7 @@ foreign(initModel, c, initModelI( +integer,
%% Get the precision (or inverse variance) of the gaussian prior. %% Get the precision (or inverse variance) of the gaussian prior.
%% train/5 should be called before. %% train/5 should be called before.
%% %%
alpha(Alpha) :- blr_alpha(Alpha) :-
alphaI(Alpha). alphaI(Alpha).
foreign(alpha, c, alphaI([-float32])). foreign(alpha, c, alphaI([-float32])).
...@@ -73,7 +73,7 @@ foreign(alpha, c, alphaI([-float32])). ...@@ -73,7 +73,7 @@ foreign(alpha, c, alphaI([-float32])).
%% Get the precision (or inverse variance) beta of the model. %% Get the precision (or inverse variance) beta of the model.
%% train/5 should be called before. %% train/5 should be called before.
%% %%
beta(Beta) :- blr_beta(Beta) :-
betaI(Beta). betaI(Beta).
foreign(beta, c, betaI([-float32])). foreign(beta, c, betaI([-float32])).
...@@ -87,7 +87,7 @@ foreign(beta, c, betaI([-float32])). ...@@ -87,7 +87,7 @@ foreign(beta, c, betaI([-float32])).
%% --Description-- %% --Description--
%% Get the mean vector computed on the features over the training points. %% Get the mean vector computed on the features over the training points.
%% %%
dataOffset(ResponsesList) :- blr_dataOffset(ResponsesList) :-
dataOffsetI(X, Xsize), dataOffsetI(X, Xsize),
convert_float_array_to_list(X, Xsize, ResponsesList). convert_float_array_to_list(X, Xsize, ResponsesList).
...@@ -102,7 +102,7 @@ foreign(dataOffset, c, dataOffsetI(-pointer(float_array), -integer)). ...@@ -102,7 +102,7 @@ foreign(dataOffset, c, dataOffsetI(-pointer(float_array), -integer)).
%% --Description-- %% --Description--
%% Get the vector of standard deviations computed on the features over the training points. %% Get the vector of standard deviations computed on the features over the training points.
%% %%
dataScale(DataOffsetList) :- blr_dataScale(DataOffsetList) :-
dataScaleI(X, Xsize), dataScaleI(X, Xsize),
convert_float_array_to_list(X, Xsize, DataOffsetList). convert_float_array_to_list(X, Xsize, DataOffsetList).
...@@ -117,7 +117,7 @@ foreign(dataScale, c, dataScaleI(-pointer(float_array), -integer)). ...@@ -117,7 +117,7 @@ foreign(dataScale, c, dataScaleI(-pointer(float_array), -integer)).
%% --Description-- %% --Description--
%% Get the solution vector. %% Get the solution vector.
%% %%
omega(OmegaList) :- blr_omega(OmegaList) :-
omegaI(X, Xsize), omegaI(X, Xsize),
convert_float_array_to_list(X, Xsize, OmegaList). convert_float_array_to_list(X, Xsize, OmegaList).
...@@ -133,7 +133,7 @@ foreign(omega, c, omegaI(-pointer(float_array), -integer)). ...@@ -133,7 +133,7 @@ foreign(omega, c, omegaI(-pointer(float_array), -integer)).
%% --Description--
%% Predict yi for each data point in the given data matrix using the
%% currently-trained Bayesian Ridge model.
%%
%% PointsList/PointsRows describe the (flattened, column-major) input
%% matrix; PredictionsList is unified with the list of predictions.
blr_predict(PointsList, PointsRows, PredictionsList) :-
    convert_list_to_float_array(PointsList, PointsRows, array(Xsize, Xrows, X)),
    predictI(X, Xsize, Xrows, Y, Ysize),
    convert_float_array_to_list(Y, Ysize, PredictionsList).
...@@ -152,7 +152,7 @@ foreign(predict, c, predictI( +pointer(float_array), +integer, +integer, ...@@ -152,7 +152,7 @@ foreign(predict, c, predictI( +pointer(float_array), +integer, +integer,
%% --Description--
%% Predict yi and the standard deviation of the predictive posterior
%% distribution for each data point in the given data matrix, using the
%% currently-trained Bayesian Ridge estimator.
%%
%% NOTE(review): the trailing conversion of the STD array was cut off in
%% the diff view and is reconstructed from context - confirm upstream.
blr_predictWithStd(PointsList, PointsRows, PredictionsList, STDList) :-
    convert_list_to_float_array(PointsList, PointsRows, array(Xsize, Xrows, X)),
    predictWithStdI(X, Xsize, Xrows, Y, Ysize, Z, Zsize),
    convert_float_array_to_list(Y, Ysize, PredictionsList),
    convert_float_array_to_list(Z, Zsize, STDList).
...@@ -173,7 +173,7 @@ foreign(predictWithStd, c, predictWithStdI( +pointer(float_array), +integer, ...@@ -173,7 +173,7 @@ foreign(predictWithStd, c, predictWithStdI( +pointer(float_array), +integer,
%% --Description--
%% Compute the Root Mean Square Error between the predictions returned by
%% the model and the true responses.
%%
%% DataList/DataRows describe the input matrix, ResponsesList the true
%% responses; RMSE is unified with the resulting error value.
blr_rmse(DataList, DataRows, ResponsesList, RMSE) :-
    convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrows, X)),
    convert_list_to_float_array(ResponsesList, array(Ysize, Y)),
    rmseI(X, Xsize, Xrows, Y, Ysize, RMSE).
...@@ -193,7 +193,7 @@ foreign(rmse, c, rmseI(+pointer(float_array), +integer, +integer, ...@@ -193,7 +193,7 @@ foreign(rmse, c, rmseI(+pointer(float_array), +integer, +integer,
%% --Description--
%% Run BayesianLinearRegression training.
%% The input matrix (like all mlpack matrices) should be column-major:
%% each column is an observation and each row is a dimension.
blr_train(DataList, DataRows, ResponsesList) :-
    convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrows, X)),
    convert_list_to_float_array(ResponsesList, array(Ysize, Y)),
    trainI(X, Xsize, Xrows, Y, Ysize).
...@@ -211,7 +211,7 @@ foreign(train, c, trainI( +pointer(float_array), +integer, +integer, ...@@ -211,7 +211,7 @@ foreign(train, c, trainI( +pointer(float_array), +integer, +integer,
%% --Description--
%% Get the estimated variance of the trained model.
%% blr_train/3 should be called before.
blr_variance(Variance) :-
    varianceI(Variance).

foreign(variance, c, varianceI([-float32])).
......
...@@ -9,211 +9,211 @@ ...@@ -9,211 +9,211 @@
%% Test helper: (re)initialise the model with the default parameters used
%% throughout this test file.
reset_Model :-
    blr_initModel(1, 0, 50, 0.0001).
%%
%% TESTING predicate blr_initModel/4
%%
:- begin_tests(blr_initModel).

%% Failure Tests
test(bay_lin_reg_InitModel_Negative_NIterMax, fail) :-
    blr_initModel(0, 0, -50, 0.0001).

test(bay_lin_reg_InitModel_Negative_Tolerance, fail) :-
    blr_initModel(0, 0, 50, -0.0001).

%% Successful Tests
test(bay_lin_reg_InitModel_Default_Inputs) :-
    blr_initModel(1, 0, 50, 0.0001).

test(bay_lin_reg_InitModel_Alternative_Inputs) :-
    blr_initModel(1, 1, 0, 0.0071).

:- end_tests(blr_initModel).
%%
%% TESTING predicate blr_alpha/1
%%
:- begin_tests(blr_alpha).

%% Failure Tests
test(bay_lin_reg_Alpha_Wrong_Input, fail) :-
    reset_Model,
    blr_alpha(1).

%% Successful Tests
test(bay_lin_reg_Alpha_Std_init, [true(Alpha =:= 0.0)]) :-
    reset_Model,
    blr_alpha(Alpha).

test(bay_lin_reg_Alpha_After_Train, [true(Alpha =:= 0.12986500952614138)]) :-
    reset_Model,
    blr_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
    blr_alpha(Alpha).

:- end_tests(blr_alpha).
%%
%% TESTING predicate blr_beta/1
%%
:- begin_tests(blr_beta).

%% Failure Tests
test(bay_lin_reg_Beta_Wrong_Input, fail) :-
    reset_Model,
    blr_beta(1).

%% Successful Tests
test(bay_lin_reg_Beta_Std_init, [true(Beta =:= 0.0)]) :-
    reset_Model,
    blr_beta(Beta).

test(bay_lin_reg_Beta_After_Train, [true(Beta =:= 2.317989668988762E+31)]) :-
    reset_Model,
    blr_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
    blr_beta(Beta).

:- end_tests(blr_beta).
%%
%% TESTING predicate blr_dataOffset/1
%%
:- begin_tests(blr_dataOffset).

%% Failure Tests
test(bay_lin_reg_DataOffset_Before_Train, [error(_,system_error('The Model is not Trained!'))]) :-
    reset_Model,
    blr_dataOffset(_).

%% Successful Tests
test(bay_lin_reg_DataOffset_DirektInput) :-
    reset_Model,
    blr_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
    blr_dataOffset(ResponsesOffsetList),
    print('\nResponsesOffset: '),
    print(ResponsesOffsetList).

test(bay_lin_reg_DataOffset_CSV_Input) :-
    open('src/data_csv/iris2.csv', read, File),
    take_csv_row(File, skipFirstRow,10, Data),
    blr_train(Data, 4, [0,1,0,1,1,0,1,1,1,0]),
    blr_dataOffset(ResponsesOffsetList),
    print('\nResponsesOffset: '),
    print(ResponsesOffsetList).

:- end_tests(blr_dataOffset).
%%
%% TESTING predicate blr_dataScale/1
%%
:- begin_tests(blr_dataScale).

%% Failure Tests
test(bay_lin_reg_DataScale_Before_Train, [error(_,system_error('The Model is not Trained!'))]) :-
    reset_Model,
    blr_dataScale(_).

%% Successful Tests
test(bay_lin_reg_DataScale_DirektInput) :-
    reset_Model,
    blr_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
    blr_dataScale(DataOffsetList),
    print('\nDataOffset: '),
    print(DataOffsetList).

test(bay_lin_reg_DataScale_CSV_Input) :-
    open('src/data_csv/iris2.csv', read, File),
    take_csv_row(File, skipFirstRow,10, Data),
    blr_train(Data, 4, [0,1,0,1,1,0,1,1,1,0]),
    blr_dataScale(DataOffsetList),
    print('\nDataOffset: '),
    print(DataOffsetList).

:- end_tests(blr_dataScale).
%%
%% TESTING predicate blr_omega/1
%%
:- begin_tests(blr_omega).

%% Failure Tests
test(bay_lin_reg_Omega_Before_Train, [error(_,system_error('The Model is not Trained!'))]) :-
    reset_Model,
    blr_omega(_).

%% Successful Tests
test(bay_lin_reg_Omega_DirektInput) :-
    reset_Model,
    blr_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
    blr_omega(OmegaList),
    print('\nOmega: '),
    print(OmegaList).

test(bay_lin_reg_Omega_CSV_Input) :-
    open('src/data_csv/iris2.csv', read, File),
    take_csv_row(File, skipFirstRow,10, Data),
    blr_train(Data, 4, [0,1,0,1,1,0,1,1,1,0]),
    blr_omega(OmegaList),
    print('\nOmega: '),
    print(OmegaList).

:- end_tests(blr_omega).
%%
%% TESTING predicate blr_predict/3
%%
:- begin_tests(blr_predict).

%% Failure Tests
test(bay_lin_reg_Predict_Before_Train, [error(_,system_error('The Model is not Trained!'))]) :-
    reset_Model,
    blr_predict([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, _).

test(bay_lin_reg_Predict_Different_Dims_Than_Trained, [error(_,system_error('each_col(): incompatible size; expected 4x1, got 3x1'))]) :-
    reset_Model,
    blr_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
    blr_predict([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, _).

%% Successful Tests
test(bay_lin_reg_Predict_Direct_Input) :-
    reset_Model,
    blr_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
    blr_predict([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, PredictionsList),
    print('\nPredictions: '),
    print(PredictionsList).

test(bay_lin_reg_Predict_CSV_Input) :-
    reset_Model,
    open('src/data_csv/iris2.csv', read, File),
    take_csv_row(File, skipFirstRow,10, Data),
    blr_train(Data, 4, [0,1,0,1,1,0,1,1,1,0]),
    blr_predict([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, PredictionsList),
    print('\nPredictions: '),
    print(PredictionsList).

:- end_tests(blr_predict).
%%
%% TESTING predicate blr_predictWithStd/4
%%
:- begin_tests(blr_predictWithStd).

%% Failure Tests
test(bay_lin_reg_PredictWithStd_Before_Train, [error(_,system_error('The Model is not Trained!'))]) :-
    reset_Model,
    blr_predictWithStd([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, _, _).

test(bay_lin_reg_PredictWithStd_Different_Dims_Than_Trained, [error(_,system_error('each_col(): incompatible size; expected 4x1, got 3x1'))]) :-
    reset_Model,
    blr_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
    blr_predictWithStd([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, _, _).

%% Successful Tests
%% NOTE(review): the final print(STDList). of the Direct_Input test was cut
%% off in the diff view and is reconstructed from context - confirm upstream.
test(bay_lin_reg_PredictWithStd_Direct_Input) :-
    reset_Model,
    blr_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
    blr_predictWithStd([3, 2, 0, 5, 1, 4, 0, 0, 4, 3, 3, 5, 0, 5, 5], 3, PredictionsList, STDList),
    print('\nPredictions: '),
    print(PredictionsList),
    print('\nSTD: '),
    print(STDList).

test(bay_lin_reg_PredictWithStd_CSV_Input) :-
    reset_Model,
    open('src/data_csv/iris2.csv', read, File),
    take_csv_row(File, skipFirstRow,10, Data),
    blr_train(Data, 4, [0,1,0,1,1,0,1,1,1,0]),
    blr_predictWithStd([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, PredictionsList, STDList),
    print('\nPredictions: '),
    print(PredictionsList),
    print('\nSTD: '),
    print(STDList).

:- end_tests(blr_predictWithStd).
%%
%% TESTING predicate blr_rmse/4
%%
:- begin_tests(blr_rmse).

%% Failure Tests
test(bay_lin_reg_RMSE_Before_Train, [error(_,system_error('The Model is not Trained!'))]) :-
    reset_Model,
    blr_rmse([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], _).

test(bay_lin_reg_RMSE_Too_Small_Label_Dims, [error(_,system_error('subtraction: incompatible matrix dimensions: 1x2 and 1x4'))]) :-
    reset_Model,
    blr_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
    blr_rmse([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], _).

test(bay_lin_reg_RMSE_Too_Large_Label_Dims, [error(_,system_error('subtraction: incompatible matrix dimensions: 1x6 and 1x4'))]) :-
    reset_Model,
    blr_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
    blr_rmse([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,1,0,0,0], _).

%% doesnt cause an exception
test(bay_lin_reg_RMSE_Wrong_Label_Value) :-
    reset_Model,
    blr_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
    blr_rmse([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,5,0,-1], _).

test(bay_lin_reg_RMSE_Wrong_Data_Dims, [error(_,system_error('each_col(): incompatible size; expected 4x1, got 3x1'))]) :-
    reset_Model,
    blr_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
    blr_rmse([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, [0,1,0,1], _).

test(bay_lin_reg_RMSE_Wrong_Amount_Off_DataPoints, [error(_,system_error('subtraction: incompatible matrix dimensions: 1x10 and 1x5'))]) :-
    reset_Model,
    open('src/data_csv/iris2.csv', read, File),
    take_csv_row(File, skipFirstRow,10, Data),
    blr_train(Data, 4, [0,1,0,1,1,0,1,1,1,0]),
    blr_rmse([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, [0,1,0,1,1,0,1,1,1,0], _).

%% Successful Tests
test(bay_lin_reg_RMSE_Direct_Input) :-
    reset_Model,
    blr_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
    blr_rmse([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], RMSE),
    print('\nRMSE: '),
    print(RMSE).

test(bay_lin_reg_RMSE_CSV_Input) :-
    reset_Model,
    open('src/data_csv/iris2.csv', read, File),
    take_csv_row(File, skipFirstRow,5, Data),
    blr_train(Data, 4, [0,1,0,1,1]),
    blr_rmse([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, [0,1,0,1,1], RMSE),
    print('\nRMSE: '),
    print(RMSE).

:- end_tests(blr_rmse).
%%
%% TESTING predicate blr_train/3
%%
:- begin_tests(blr_train).

%% Failure Tests
test(bay_lin_reg_Train_Too_Small_Label_Dims, [error(_,system_error('Target dim doesnt fit to the Data dim'))]) :-
    reset_Model,
    blr_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1]).

test(bay_lin_reg_Train_Too_Large_Label_Dims, [error(_,system_error('Target dim doesnt fit to the Data dim'))]) :-
    reset_Model,
    blr_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,1,0,0,0]).

%% doesnt cause an Exception
test(bay_lin_reg_Train_Wrong_Label_Value) :-
    reset_Model,
    blr_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,5,0,-1]).

%% Successful Tests
test(bay_lin_reg_Train_Direct_Input) :-
    reset_Model,
    blr_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]).

test(bay_lin_reg_Train_CSV_Input) :-
    reset_Model,
    open('src/data_csv/iris2.csv', read, File),
    take_csv_row(File, skipFirstRow,10, Data),
    blr_train(Data, 4, [0,1,0,1,1,0,1,1,1,0]).

:- end_tests(blr_train).
%%
%% TESTING predicate blr_variance/1
%%
:- begin_tests(blr_variance).

%% Failure Tests
test(bay_lin_reg_Variance_Before_Train, [error(_,system_error('The Model is not Trained!'))]) :-
    reset_Model,
    blr_variance(_).

%% Successful Tests
test(bay_lin_reg_Variance_After_Train, [true(Variance =:= 4.3140830754274083E-32)]) :-
    reset_Model,
    blr_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
    blr_variance(Variance).

:- end_tests(blr_variance).
run_bayesian_linear_regression_tests :- run_bayesian_linear_regression_tests :-
......
%% Module interface of the mlpack decision-tree binding; all exported
%% predicates carry the decision_tree_ prefix.
:- module(decision_tree, [ decision_tree_initModel/7,
                           decision_tree_classifyPoint/3,
                           decision_tree_classifyMatrix/5,
                           decision_tree_train/8]).
%% requirements of library(struct) %% requirements of library(struct)
:- load_files(library(str_decl), :- load_files(library(str_decl),
...@@ -34,7 +34,7 @@ ...@@ -34,7 +34,7 @@
%% Construct the decision tree on the given data and labels, assuming that the data is all of the numeric type. %% Construct the decision tree on the given data and labels, assuming that the data is all of the numeric type.
%% Setting minimumLeafSize and minimumGainSplit too small may cause the tree to overfit, but setting them too large may cause it to underfit. %% Setting minimumLeafSize and minimumGainSplit too small may cause the tree to overfit, but setting them too large may cause it to underfit.
%% %%
initModel(DataList, DataRows, LabelsList, NumClasses, MinimumLeafSize, MinimumGainSplit, MaximumDepth) :- decision_tree_initModel(DataList, DataRows, LabelsList, NumClasses, MinimumLeafSize, MinimumGainSplit, MaximumDepth) :-
NumClasses >= 0, NumClasses >= 0,
MinimumLeafSize > 0, MinimumLeafSize > 0,
MinimumGainSplit > 0.0, MinimumGainSplit > 0.0,
...@@ -59,7 +59,7 @@ foreign(initModel, c, initModelI( +pointer(float_array), +integer, +intege ...@@ -59,7 +59,7 @@ foreign(initModel, c, initModelI( +pointer(float_array), +integer, +intege
%% --Description--
%% Classify the given point and also return estimates of the probability
%% for each class in the given vector.
%%
%% DataList is the point to classify; Prediction is unified with the
%% predicted class, AssignList with the per-class probability estimates.
decision_tree_classifyPoint(DataList, Prediction, AssignList) :-
    convert_list_to_float_array(DataList, array(Xsize, X)),
    classifyPointI(X, Xsize, Prediction, Y, Ysize),
    convert_float_array_to_list(Y, Ysize, AssignList).
...@@ -79,7 +79,7 @@ foreign(classifyPoint, c, classifyPointI(+pointer(float_array), +integer, ...@@ -79,7 +79,7 @@ foreign(classifyPoint, c, classifyPointI(+pointer(float_array), +integer,
%% --Description-- %% --Description--
%% Classify the given points and also return estimates of the probabilities for each class in the given matrix. %% Classify the given points and also return estimates of the probabilities for each class in the given matrix.
%% %%
classifyMatrix(DataList, DataRows, PredictionList, ProbsList, ZCols) :- decision_tree_classifyMatrix(DataList, DataRows, PredictionList, ProbsList, ZCols) :-
convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrows, X)), convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrows, X)),
classifyMatrixI(X, Xsize, Xrows, Y, Ysize, Z, ZCols, ZRows), classifyMatrixI(X, Xsize, Xrows, Y, Ysize, Z, ZCols, ZRows),
convert_float_array_to_list(Y, Ysize, PredictionList), convert_float_array_to_list(Y, Ysize, PredictionList),
...@@ -105,7 +105,7 @@ foreign(classifyMatrix, c, classifyMatrixI( +pointer(float_array), +integer ...@@ -105,7 +105,7 @@ foreign(classifyMatrix, c, classifyMatrixI( +pointer(float_array), +integer
%% Train the decision tree on the given data, assuming that all dimensions are numeric. %% Train the decision tree on the given data, assuming that all dimensions are numeric.
%% This will overwrite the given model. Setting minimumLeafSize and minimumGainSplit too small may cause the tree to overfit, but setting them too large may cause it to underfit. %% This will overwrite the given model. Setting minimumLeafSize and minimumGainSplit too small may cause the tree to overfit, but setting them too large may cause it to underfit.
%% %%
train(DataList, DataRows, LabelsList, NumClasses, MinimumLeafSize, MinimumGainSplit, MaximumDepth, Entropy) :- decision_tree_train(DataList, DataRows, LabelsList, NumClasses, MinimumLeafSize, MinimumGainSplit, MaximumDepth, Entropy) :-
NumClasses >= 0, NumClasses >= 0,
MinimumLeafSize > 0, MinimumLeafSize > 0,
MinimumGainSplit > 0.0, MinimumGainSplit > 0.0,
......
...@@ -7,59 +7,59 @@ ...@@ -7,59 +7,59 @@
:- use_module('../../helper_files/helper.pl'). :- use_module('../../helper_files/helper.pl').
%% Test helper: (re)initialise and train the decision-tree model with the
%% default fixture data used throughout this test file.
reset_Model_With_Train :-
    decision_tree_initModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,0], 2, 10, 0.5, 0).
%%
%% TESTING predicate decision_tree_initModel/7
%%
:- begin_tests(decision_tree_initModel).

%% Failure Tests
test(decision_tree_Negative_NumClass, fail) :-
    decision_tree_initModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], -1, 1, 0.5, 0).

test(decision_tree_Negative_LeafSize, fail) :-
    decision_tree_initModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 1, -1, 0.5, 0).

test(decision_tree_Negative_GainSplit, fail) :-
    decision_tree_initModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 1, 1, -0.5, 0).

test(decision_tree_Too_High_GainSplit, fail) :-
    decision_tree_initModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 1, 1, 1.5, 0).

test(decision_tree_Negative_MaxDepth, fail) :-
    decision_tree_initModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 1, 1, 0.5, -1).

test(decision_tree_Init_With_Wrong_Label_Dims1, [error(_,system_error('Labels Vector is too short or its values are incorrect: should fit into [0,numClasses)!'))]) :-
    decision_tree_initModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0], 1, 1, 0.5, 1).

%% If the label vector is to long it seems to cause no problems
test(decision_tree_Init_With_Wrong_Label_Dims2) :-
    decision_tree_initModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0,0,0], 1, 1, 0.5, 1).

%% The same when the label values are out of range
test(decision_tree_Init_With_Wrong_Label_Value) :-
    decision_tree_initModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,5,0,-1], 1, 1, 0.5, 1).

%% Successful Tests
test(initModel_Direkt_Input_Use) :-
    decision_tree_initModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, 10, 0.5, 0).

test(initModel_Direkt_CSV_Use) :-
    open('src/data_csv/iris2.csv', read, File),
    take_csv_row(File, skipFirstRow,10, Data),
    decision_tree_initModel(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 2, 0.7, 3).

:- end_tests(decision_tree_initModel).
%% %%
%% TESTING predicate classifyPoint/3 %% TESTING predicate decision_tree_classifyPoint/3
%% %%
:- begin_tests(classifyPoint). :- begin_tests(decision_tree_classifyPoint).
%% Failure Tests %% Failure Tests
...@@ -67,7 +67,7 @@ test(initModel_Direkt_CSV_Use) :- ...@@ -67,7 +67,7 @@ test(initModel_Direkt_CSV_Use) :-
%% so im not certain if this should be forced to fail %% so im not certain if this should be forced to fail
test(classify_Point_With_Wrong_Dims) :- test(classify_Point_With_Wrong_Dims) :-
reset_Model_With_Train, reset_Model_With_Train,
classifyPoint([5.1,3.5,1.4,1.2,3.3], Prediction, AssignList), decision_tree_classifyPoint([5.1,3.5,1.4,1.2,3.3], Prediction, AssignList),
print(Prediction), print(Prediction),
print('\n'), print('\n'),
print(AssignList). print(AssignList).
...@@ -77,25 +77,25 @@ test(classify_Point_With_Wrong_Dims) :- ...@@ -77,25 +77,25 @@ test(classify_Point_With_Wrong_Dims) :-
test(classify_Point1) :- test(classify_Point1) :-
reset_Model_With_Train, reset_Model_With_Train,
classifyPoint([5.1,3.5,1.4], Prediction, AssignList), decision_tree_classifyPoint([5.1,3.5,1.4], Prediction, AssignList),
print(Prediction), print(Prediction),
print('\n'), print('\n'),
print(AssignList). print(AssignList).
test(classify_Point2) :- test(classify_Point2) :-
reset_Model_With_Train, reset_Model_With_Train,
classifyPoint([6.2,1.9,2.3], Prediction, AssignList), decision_tree_classifyPoint([6.2,1.9,2.3], Prediction, AssignList),
print(Prediction), print(Prediction),
print('\n'), print('\n'),
print(AssignList). print(AssignList).
:- end_tests(classifyPoint). :- end_tests(decision_tree_classifyPoint).
%% %%
%% TESTING predicate classifyMatrix/4 %% TESTING predicate decision_tree_classifyMatrix/4
%% %%
:- begin_tests(classifyMatrix). :- begin_tests(decision_tree_classifyMatrix).
%% Failure Tests %% Failure Tests
...@@ -103,7 +103,7 @@ test(classify_Point2) :- ...@@ -103,7 +103,7 @@ test(classify_Point2) :-
%% so im not certain if this should be forced to fail %% so im not certain if this should be forced to fail
test(classify_Matrix_With_Wrong_Dims1) :- test(classify_Matrix_With_Wrong_Dims1) :-
reset_Model_With_Train, reset_Model_With_Train,
classifyMatrix([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 5, PredictionList, ProbsList, _), decision_tree_classifyMatrix([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 5, PredictionList, ProbsList, _),
print(PredictionList), print(PredictionList),
print('\n'), print('\n'),
print(ProbsList). print(ProbsList).
...@@ -112,7 +112,7 @@ test(classify_Matrix_With_Wrong_Dims1) :- ...@@ -112,7 +112,7 @@ test(classify_Matrix_With_Wrong_Dims1) :-
%% so im not certain if this should be forced to fail %% so im not certain if this should be forced to fail
test(classify_Matrix_With_Wrong_Dims2) :- test(classify_Matrix_With_Wrong_Dims2) :-
reset_Model_With_Train, reset_Model_With_Train,
classifyMatrix([3, 2, 0, 5, 1, 4, 0, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 2, PredictionList, ProbsList, _), decision_tree_classifyMatrix([3, 2, 0, 5, 1, 4, 0, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 2, PredictionList, ProbsList, _),
print(PredictionList), print(PredictionList),
print('\n'), print('\n'),
print(ProbsList). print(ProbsList).
...@@ -121,22 +121,22 @@ test(classify_Matrix_With_Wrong_Dims2) :- ...@@ -121,22 +121,22 @@ test(classify_Matrix_With_Wrong_Dims2) :-
%% Successful Tests %% Successful Tests
test(classify_Matrix_Wierd_Trained_Labels) :- test(classify_Matrix_Wierd_Trained_Labels) :-
initModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,5,0,-1], 1, 1, 0.5, 1), decision_tree_initModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,5,0,-1], 1, 1, 0.5, 1),
classifyMatrix([5.1,3.5,1.4, 5.1,3.5,1.4, 5.1,3.5,1.4, 5.1,3.5,1.4, 5.1,3.5,1.4], 3, PredictionList, ProbsList, _), decision_tree_classifyMatrix([5.1,3.5,1.4, 5.1,3.5,1.4, 5.1,3.5,1.4, 5.1,3.5,1.4, 5.1,3.5,1.4], 3, PredictionList, ProbsList, _),
print(PredictionList), print(PredictionList),
print('\n'), print('\n'),
print(ProbsList). print(ProbsList).
test(classify_Matrix_Direkt_Input1) :- test(classify_Matrix_Direkt_Input1) :-
reset_Model_With_Train, reset_Model_With_Train,
classifyMatrix([5.1,3.5,1.4, 5.1,3.5,1.4, 5.1,3.5,1.4, 5.1,3.5,1.4, 5.1,3.5,1.4], 3, PredictionList, ProbsList, _), decision_tree_classifyMatrix([5.1,3.5,1.4, 5.1,3.5,1.4, 5.1,3.5,1.4, 5.1,3.5,1.4, 5.1,3.5,1.4], 3, PredictionList, ProbsList, _),
print(PredictionList), print(PredictionList),
print('\n'), print('\n'),
print(ProbsList). print(ProbsList).
test(classify_Matrix_Direkt_Input2) :- test(classify_Matrix_Direkt_Input2) :-
reset_Model_With_Train, reset_Model_With_Train,
classifyMatrix([2, 2, 3, 5, 1, 4, 1, 1, 4, 0, 3, 5, 0, 5, 5], 3, PredictionList, ProbsList, _), decision_tree_classifyMatrix([2, 2, 3, 5, 1, 4, 1, 1, 4, 0, 3, 5, 0, 5, 5], 3, PredictionList, ProbsList, _),
print(PredictionList), print(PredictionList),
print('\n'), print('\n'),
print(ProbsList). print(ProbsList).
...@@ -144,69 +144,69 @@ test(classify_Matrix_Direkt_Input2) :- ...@@ -144,69 +144,69 @@ test(classify_Matrix_Direkt_Input2) :-
test(classify_Matrix_CSV_Trained) :- test(classify_Matrix_CSV_Trained) :-
open('src/data_csv/iris2.csv', read, File), open('src/data_csv/iris2.csv', read, File),
take_csv_row(File, skipFirstRow,30, Data), take_csv_row(File, skipFirstRow,30, Data),
train(Data, 4, [0,1,0,1,1,0,1,1,1,0, 0,1,0,1,1,0,1,1,1,0, 0,1,0,1,1,0,1,1,1,0], 2, 5, 0.0007, 0, _), decision_tree_train(Data, 4, [0,1,0,1,1,0,1,1,1,0, 0,1,0,1,1,0,1,1,1,0, 0,1,0,1,1,0,1,1,1,0], 2, 5, 0.0007, 0, _),
classifyMatrix([2, 2, 3, 5, 1, 4, 1, 1, 4, 0, 3, 5, 0, 5, 5, 2, 2, 6, 0, 1], 4, PredictionList, ProbsList, _), decision_tree_classifyMatrix([2, 2, 3, 5, 1, 4, 1, 1, 4, 0, 3, 5, 0, 5, 5, 2, 2, 6, 0, 1], 4, PredictionList, ProbsList, _),
print(PredictionList), print(PredictionList),
print('\n'), print('\n'),
print(ProbsList). print(ProbsList).
:- end_tests(classifyMatrix). :- end_tests(decision_tree_classifyMatrix).
%% %%
%% TESTING predicate train/8 %% TESTING predicate decision_tree_train/8
%% %%
:- begin_tests(train). :- begin_tests(decision_tree_train).
%% Failure Tests %% Failure Tests
test(decision_tree_Train_Negative_NumClass, fail) :- test(decision_tree_Train_Negative_NumClass, fail) :-
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], -1, 1, 0.5, 0, _). decision_tree_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], -1, 1, 0.5, 0, _).
test(decision_tree_Train_Negative_LeafSize, fail) :- test(decision_tree_Train_Negative_LeafSize, fail) :-
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 1, -1, 0.5, 0, _). decision_tree_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 1, -1, 0.5, 0, _).
test(decision_tree_Train_Negative_GainSplit, fail) :- test(decision_tree_Train_Negative_GainSplit, fail) :-
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 1, 1, -0.5, 0, _). decision_tree_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 1, 1, -0.5, 0, _).
test(decision_tree_Train_Too_High_GainSplit, fail) :- test(decision_tree_Train_Too_High_GainSplit, fail) :-
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 1, 1, 1.5, 0, _). decision_tree_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 1, 1, 1.5, 0, _).
test(decision_tree_Train_Negative_MaxDepth, fail) :- test(decision_tree_Train_Negative_MaxDepth, fail) :-
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 1, 1, 0.5, -1, _). decision_tree_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 1, 1, 0.5, -1, _).
test(decision_tree_Train_Wrong_Label_Dims1, [error(_,system_error('DecisionTree::Train(): number of points (4) does not match number of labels (2)!\n'))]) :- test(decision_tree_Train_Wrong_Label_Dims1, [error(_,system_error('DecisionTree::Train(): number of points (4) does not match number of labels (2)!\n'))]) :-
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0], 1, 1, 0.5, 1, _). decision_tree_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0], 1, 1, 0.5, 1, _).
test(decision_tree_Train_Wrong_Label_Dims2, [error(_,system_error('DecisionTree::Train(): number of points (4) does not match number of labels (6)!\n'))]) :- test(decision_tree_Train_Wrong_Label_Dims2, [error(_,system_error('DecisionTree::Train(): number of points (4) does not match number of labels (6)!\n'))]) :-
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0,0,0], 1, 1, 0.5, 1, _). decision_tree_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0,0,0], 1, 1, 0.5, 1, _).
%% there seems to be no check for the label values %% there seems to be no check for the label values
test(decision_tree_Train_Wrong_Labels) :- test(decision_tree_Train_Wrong_Labels) :-
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [-1,0,0,5], 1, 1, 0.5, 1, _). decision_tree_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [-1,0,0,5], 1, 1, 0.5, 1, _).
%% Successful Tests %% Successful Tests
test(initModel_Direkt_Input_Use, [true(Entropy =:= 0.0)]) :- test(initModel_Direkt_Input_Use, [true(Entropy =:= 0.0)]) :-
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, 10, 0.5, 0, Entropy). decision_tree_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, 10, 0.5, 0, Entropy).
test(initModel_Direkt_CSV_Use, [true(Entropy =:= 0.48)]) :- test(initModel_Direkt_CSV_Use, [true(Entropy =:= 0.48)]) :-
open('src/data_csv/iris2.csv', read, File), open('src/data_csv/iris2.csv', read, File),
take_csv_row(File, skipFirstRow,10, Data), take_csv_row(File, skipFirstRow,10, Data),
train(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 2, 0.7, 3, Entropy). decision_tree_train(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 2, 0.7, 3, Entropy).
test(initModel_Direkt_Input_Use, [true(Entropy =:= 0.0)]) :- test(initModel_Direkt_Input_Use, [true(Entropy =:= 0.0)]) :-
reset_Model_With_Train, reset_Model_With_Train,
train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 3, 10, 0.7, 0, Entropy). decision_tree_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 3, 10, 0.7, 0, Entropy).
test(initModel_Direkt_CSV_Use, [true(Entropy =:= 0.3767195767195767)]) :- test(initModel_Direkt_CSV_Use, [true(Entropy =:= 0.3767195767195767)]) :-
reset_Model_With_Train, reset_Model_With_Train,
open('src/data_csv/iris2.csv', read, File), open('src/data_csv/iris2.csv', read, File),
take_csv_row(File, skipFirstRow,30, Data), take_csv_row(File, skipFirstRow,30, Data),
train(Data, 4, [0,1,0,1,1,0,1,1,1,0, 0,1,0,1,1,0,1,1,1,0, 0,1,0,1,1,0,1,1,1,0], 2, 5, 0.0005, 0, Entropy). decision_tree_train(Data, 4, [0,1,0,1,1,0,1,1,1,0, 0,1,0,1,1,0,1,1,1,0, 0,1,0,1,1,0,1,1,1,0], 2, 5, 0.0005, 0, Entropy).
:- end_tests(train). :- end_tests(decision_tree_train).
run_decision_tree_tests :- run_decision_tree_tests :-
run_tests. run_tests.
:- module(fastmks, [ initModel/10, :- module(fastmks, [ fastmks_initModel/10,
searchWithQuery/8, fastmks_searchWithQuery/8,
searchNoQuery/5]). fastmks_searchNoQuery/5]).
%% requirements of library(struct) %% requirements of library(struct)
:- load_files(library(str_decl), :- load_files(library(str_decl),
...@@ -35,7 +35,7 @@ ...@@ -35,7 +35,7 @@
%% --Description-- %% --Description--
%% Initializes the model on the given reference set. %% Initializes the model on the given reference set.
%% %%
initModel(DataList, DataRows, Kernel, Degree, Offset, Bandwidth, Scale, SingleMode, Naive, Base) :- fastmks_initModel(DataList, DataRows, Kernel, Degree, Offset, Bandwidth, Scale, SingleMode, Naive, Base) :-
Base > 1.0, Base > 1.0,
Bandwidth > 0.0, Bandwidth > 0.0,
convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrows, X)), convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrows, X)),
...@@ -61,7 +61,7 @@ foreign(initModel, c, initModelI( +pointer(float_array), +integer, +intege ...@@ -61,7 +61,7 @@ foreign(initModel, c, initModelI( +pointer(float_array), +integer, +intege
%% --Description-- %% --Description--
%% Search with a different query set. %% Search with a different query set.
%% %%
searchWithQuery(DataList, DataRows, K, IndicesList, YCols, KernelsList, ZCols, Base) :- fastmks_searchWithQuery(DataList, DataRows, K, IndicesList, YCols, KernelsList, ZCols, Base) :-
K > 0, K > 0,
Base > 1.0, Base > 1.0,
convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrows, X)), convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrows, X)),
...@@ -87,7 +87,7 @@ foreign(searchWithQuery, c, searchWithQueryI( +pointer(float_array), +integer, ...@@ -87,7 +87,7 @@ foreign(searchWithQuery, c, searchWithQueryI( +pointer(float_array), +integer,
%% --Description-- %% --Description--
%% Search with the reference set as the query set. %% Search with the reference set as the query set.
%% %%
searchNoQuery(K, IndicesList, YCols, KernelsList, ZCols) :- fastmks_searchNoQuery(K, IndicesList, YCols, KernelsList, ZCols) :-
K > 0, K > 0,
searchNoQueryI(K, Y, YCols, YRows, Z, ZCols, ZRows), searchNoQueryI(K, Y, YCols, YRows, Z, ZCols, ZRows),
convert_float_array_to_2d_list(Y, YCols, YRows, IndicesList), convert_float_array_to_2d_list(Y, YCols, YRows, IndicesList),
......
...@@ -9,29 +9,29 @@ ...@@ -9,29 +9,29 @@
reset_Model :- reset_Model :-
open('src/data_csv/iris2.csv', read, File), open('src/data_csv/iris2.csv', read, File),
take_csv_row(File, skipFirstRow,10, Data), take_csv_row(File, skipFirstRow,10, Data),
initModel(Data, 4, linear, 0.0, 0.0, 1.0, 0.0, 0, 0, 1.2). fastmks_initModel(Data, 4, linear, 0.0, 0.0, 1.0, 0.0, 0, 0, 1.2).
%% %%
%% TESTING predicate initModel/10 %% TESTING predicate fastmks_initModel/10
%% %%
:- begin_tests(initModel). :- begin_tests(fastmks_initModel).
%% Failure Tests %% Failure Tests
test(searchWithQuery_Fastmks_Search_Before_Init, [error(_,system_error('The Model hasnt been trained yet!'))]) :- test(searchWithQuery_Fastmks_Search_Before_Init, [error(_,system_error('The Model hasnt been trained yet!'))]) :-
open('src/data_csv/iris2.csv', read, File), open('src/data_csv/iris2.csv', read, File),
take_csv_row(File, skipFirstRow,5, Data), take_csv_row(File, skipFirstRow,5, Data),
searchWithQuery(Data, 4, 2, _, _, _, _, 1.1). fastmks_searchWithQuery(Data, 4, 2, _, _, _, _, 1.1).
test(searchNoQuery_Fastmks_Search_Before_Init, [error(_,system_error('The Model hasnt been trained yet!'))]) :- test(searchNoQuery_Fastmks_Search_Before_Init, [error(_,system_error('The Model hasnt been trained yet!'))]) :-
searchNoQuery(2, _, _, _, _). fastmks_searchNoQuery(2, _, _, _, _).
test(initModel_Fatsmks_WrongKernel_Input, [error(domain_error('The given kernel is unkown!' , wrongKernel), _)]) :- test(initModel_Fatsmks_WrongKernel_Input, [error(domain_error('The given kernel is unkown!' , wrongKernel), _)]) :-
initModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, wrongKernel, 0.0, 0.0, 1.0, 0.0, 0, 0, 1.2). fastmks_initModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, wrongKernel, 0.0, 0.0, 1.0, 0.0, 0, 0, 1.2).
test(initModel_Fatsmks_Bad_Base_Input, fail) :- test(initModel_Fatsmks_Bad_Base_Input, fail) :-
initModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, linear, 0.0, 0.0, 1.0, 0.0, 0, 0, -0.1). fastmks_initModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, linear, 0.0, 0.0, 1.0, 0.0, 0, 0, -0.1).
%% Successful Tests %% Successful Tests
...@@ -39,45 +39,45 @@ test(initModel_Fatsmks_Bad_Base_Input, fail) :- ...@@ -39,45 +39,45 @@ test(initModel_Fatsmks_Bad_Base_Input, fail) :-
test(iniModel_Fastmks_Linear) :- test(iniModel_Fastmks_Linear) :-
open('src/data_csv/iris2.csv', read, File), open('src/data_csv/iris2.csv', read, File),
take_csv_row(File, skipFirstRow,10, Data), take_csv_row(File, skipFirstRow,10, Data),
initModel(Data, 4, linear, 0.0, 0.0, 1.0, 0.0, 0, 0, 1.5). fastmks_initModel(Data, 4, linear, 0.0, 0.0, 1.0, 0.0, 0, 0, 1.5).
test(iniModel_Fastmks_Polynomial) :- test(iniModel_Fastmks_Polynomial) :-
open('src/data_csv/iris2.csv', read, File), open('src/data_csv/iris2.csv', read, File),
take_csv_row(File, skipFirstRow,10, Data), take_csv_row(File, skipFirstRow,10, Data),
initModel(Data, 4, polynomial, 0.0, 0.0, 1.0, 0.0, 0, 0, 1.5). fastmks_initModel(Data, 4, polynomial, 0.0, 0.0, 1.0, 0.0, 0, 0, 1.5).
test(iniModel_Fastmks_Cosine) :- test(iniModel_Fastmks_Cosine) :-
open('src/data_csv/iris2.csv', read, File), open('src/data_csv/iris2.csv', read, File),
take_csv_row(File, skipFirstRow,10, Data), take_csv_row(File, skipFirstRow,10, Data),
initModel(Data, 4, cosine, 0.0, 0.0, 1.0, 0.0, 0, 0, 1.5). fastmks_initModel(Data, 4, cosine, 0.0, 0.0, 1.0, 0.0, 0, 0, 1.5).
test(iniModel_Fastmks_Gaussian) :- test(iniModel_Fastmks_Gaussian) :-
open('src/data_csv/iris2.csv', read, File), open('src/data_csv/iris2.csv', read, File),
take_csv_row(File, skipFirstRow,10, Data), take_csv_row(File, skipFirstRow,10, Data),
initModel(Data, 4, gaussian, 0.5, 0.5, 0.5, 0.5, 0, 0, 1.5). fastmks_initModel(Data, 4, gaussian, 0.5, 0.5, 0.5, 0.5, 0, 0, 1.5).
test(iniModel_Fastmks_Epanechnikov) :- test(iniModel_Fastmks_Epanechnikov) :-
open('src/data_csv/iris2.csv', read, File), open('src/data_csv/iris2.csv', read, File),
take_csv_row(File, skipFirstRow,10, Data), take_csv_row(File, skipFirstRow,10, Data),
initModel(Data, 4, epanechnikov, 0.0, 0.0, 1.0, 0.0, 0, 0, 1.5). fastmks_initModel(Data, 4, epanechnikov, 0.0, 0.0, 1.0, 0.0, 0, 0, 1.5).
test(iniModel_Fastmks_Triangular) :- test(iniModel_Fastmks_Triangular) :-
open('src/data_csv/iris2.csv', read, File), open('src/data_csv/iris2.csv', read, File),
take_csv_row(File, skipFirstRow,10, Data), take_csv_row(File, skipFirstRow,10, Data),
initModel(Data, 4, triangular, 0.0, 0.0, 1.0, 0.0, 0, 0, 1.5). fastmks_initModel(Data, 4, triangular, 0.0, 0.0, 1.0, 0.0, 0, 0, 1.5).
test(iniModel_Fastmks_Hyptan) :- test(iniModel_Fastmks_Hyptan) :-
open('src/data_csv/iris2.csv', read, File), open('src/data_csv/iris2.csv', read, File),
take_csv_row(File, skipFirstRow,10, Data), take_csv_row(File, skipFirstRow,10, Data),
initModel(Data, 4, hyptan, 0.0, 0.0, 1.0, 1.0, 0, 0, 1.5). fastmks_initModel(Data, 4, hyptan, 0.0, 0.0, 1.0, 1.0, 0, 0, 1.5).
:- end_tests(initModel). :- end_tests(fastmks_initModel).
%% %%
%% TESTING predicate searchWithQuery/8 %% TESTING predicate fastmks_searchWithQuery/8
%% %%
:- begin_tests(searchWithQuery). :- begin_tests(fastmks_searchWithQuery).
%% Failure Tests %% Failure Tests
...@@ -85,20 +85,20 @@ test(searchWithQuery_Fastmks_Negative_K, fail) :- ...@@ -85,20 +85,20 @@ test(searchWithQuery_Fastmks_Negative_K, fail) :-
reset_Model, reset_Model,
open('src/data_csv/iris2.csv', read, File), open('src/data_csv/iris2.csv', read, File),
take_csv_row(File, skipFirstRow,5, Data), take_csv_row(File, skipFirstRow,5, Data),
searchWithQuery(Data, 4, -2, _, _, _, _, 1.1). fastmks_searchWithQuery(Data, 4, -2, _, _, _, _, 1.1).
test(searchWithQuery_Fastmks_Negative_Base, fail) :- test(searchWithQuery_Fastmks_Negative_Base, fail) :-
reset_Model, reset_Model,
open('src/data_csv/iris2.csv', read, File), open('src/data_csv/iris2.csv', read, File),
take_csv_row(File, skipFirstRow,5, Data), take_csv_row(File, skipFirstRow,5, Data),
searchWithQuery(Data, 4, 2, _, _, _, _, -1.1). fastmks_searchWithQuery(Data, 4, 2, _, _, _, _, -1.1).
%% Successful Tests %% Successful Tests
test(searchWithQuery_Fastmks_New_Query) :- test(searchWithQuery_Fastmks_New_Query) :-
reset_Model, reset_Model,
searchWithQuery([3, 2, 0, 5, 1, 4, 0, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, 2, IndicesList, _, KernelsList, _, 1.1), fastmks_searchWithQuery([3, 2, 0, 5, 1, 4, 0, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, 2, IndicesList, _, KernelsList, _, 1.1),
print('Indices:\n'), print('Indices:\n'),
print(IndicesList), print(IndicesList),
print('Kernels:\n'), print('Kernels:\n'),
...@@ -108,32 +108,32 @@ test(searchWithQuery_Fastmks_Training_Data_Query) :- ...@@ -108,32 +108,32 @@ test(searchWithQuery_Fastmks_Training_Data_Query) :-
reset_Model, reset_Model,
open('src/data_csv/iris2.csv', read, File), open('src/data_csv/iris2.csv', read, File),
take_csv_row(File, skipFirstRow,5, Data), take_csv_row(File, skipFirstRow,5, Data),
searchWithQuery(Data, 4, 2, IndicesList, _, KernelsList, _, 1.1), fastmks_searchWithQuery(Data, 4, 2, IndicesList, _, KernelsList, _, 1.1),
print('Indices:\n'), print('Indices:\n'),
print(IndicesList), print(IndicesList),
print('Kernels:\n'), print('Kernels:\n'),
print(KernelsList). print(KernelsList).
:- end_tests(searchWithQuery). :- end_tests(fastmks_searchWithQuery).
%% %%
%% TESTING predicate searchNoQuery/5 %% TESTING predicate fastmks_searchNoQuery/5
%% %%
:- begin_tests(searchNoQuery). :- begin_tests(fastmks_searchNoQuery).
%% Failure Tests %% Failure Tests
test(searchNoQuery_Fastmks_Negative_K, fail) :- test(searchNoQuery_Fastmks_Negative_K, fail) :-
reset_Model, reset_Model,
searchNoQuery(-2, _, _, _, _). fastmks_searchNoQuery(-2, _, _, _, _).
%% Successful Tests %% Successful Tests
test(testDescription) :- test(testDescription) :-
reset_Model, reset_Model,
searchNoQuery(2, IndicesList, _, KernelsList, _), fastmks_searchNoQuery(2, IndicesList, _, KernelsList, _),
print('Indices:\n'), print('Indices:\n'),
print(IndicesList), print(IndicesList),
print('Kernels:\n'), print('Kernels:\n'),
...@@ -141,13 +141,13 @@ test(testDescription) :- ...@@ -141,13 +141,13 @@ test(testDescription) :-
test(testDescription) :- test(testDescription) :-
reset_Model, reset_Model,
searchNoQuery(5, IndicesList, _, KernelsList, _), fastmks_searchNoQuery(5, IndicesList, _, KernelsList, _),
print('Indices:\n'), print('Indices:\n'),
print(IndicesList), print(IndicesList),
print('Kernels:\n'), print('Kernels:\n'),
print(KernelsList). print(KernelsList).
:- end_tests(searchNoQuery). :- end_tests(fastmks_searchNoQuery).
run_fastmks_tests :- run_fastmks_tests :-
run_tests. run_tests.
......
:- module(hoeffding_tree, [ initAndBuildModel/12, :- module(hoeffding_tree, [ hoeffding_tree_initAndBuildModel/12,
classify/4, hoeffding_tree_classify/4,
train/4]). hoeffding_tree_train/4]).
%% requirements of library(struct) %% requirements of library(struct)
:- load_files(library(str_decl), :- load_files(library(str_decl),
...@@ -38,7 +38,7 @@ ...@@ -38,7 +38,7 @@
%% Construct the Hoeffding tree with the given parameters and given training data. %% Construct the Hoeffding tree with the given parameters and given training data.
%% The tree may be trained either in batch mode (which looks at all points before splitting, and propagates these points to the created children for further training), or in streaming mode, where each point is only considered once. (In general, batch mode will give better-performing trees, but will have higher memory and runtime costs for the same dataset.) %% The tree may be trained either in batch mode (which looks at all points before splitting, and propagates these points to the created children for further training), or in streaming mode, where each point is only considered once. (In general, batch mode will give better-performing trees, but will have higher memory and runtime costs for the same dataset.)
%% %%
initAndBuildModel(TreeType, DataList, DataRows, LabelsList, NumClasses, BatchTraining, SuccessProbability, MaxSamples, CheckInterval, MinSamples, Bins, ObservationsBeforeBinning) :- hoeffding_tree_initAndBuildModel(TreeType, DataList, DataRows, LabelsList, NumClasses, BatchTraining, SuccessProbability, MaxSamples, CheckInterval, MinSamples, Bins, ObservationsBeforeBinning) :-
NumClasses >= 0, NumClasses >= 0,
SuccessProbability >= 0, SuccessProbability >= 0,
SuccessProbability =< 1, SuccessProbability =< 1,
...@@ -69,7 +69,7 @@ foreign(initAndBuildModel, c, initAndBuildModelI( +string, ...@@ -69,7 +69,7 @@ foreign(initAndBuildModel, c, initAndBuildModelI( +string,
%% Classify the given points, using this node and the entire (sub)tree beneath it. %% Classify the given points, using this node and the entire (sub)tree beneath it.
%% The predicted labels for each point are returned, as well as an estimate of the probability that the prediction is correct for each point. This estimate is simply the MajorityProbability for the leaf that each point bins to. %% The predicted labels for each point are returned, as well as an estimate of the probability that the prediction is correct for each point. This estimate is simply the MajorityProbability for the leaf that each point bins to.
%% %%
classify(TestList, TestRows, PredicList, ProbsList) :- hoeffding_tree_classify(TestList, TestRows, PredicList, ProbsList) :-
convert_list_to_float_array(TestList, TestRows, array(Xsize, Xrownum, X)), convert_list_to_float_array(TestList, TestRows, array(Xsize, Xrownum, X)),
classifyI(X, Xsize, Xrownum, Y, Ysize, Z, Zsize), classifyI(X, Xsize, Xrownum, Y, Ysize, Z, Zsize),
convert_float_array_to_list(Y, Ysize, PredicList), convert_float_array_to_list(Y, Ysize, PredicList),
...@@ -92,7 +92,7 @@ foreign(classify, c, classifyI( +pointer(float_array), +integer, +integer, ...@@ -92,7 +92,7 @@ foreign(classify, c, classifyI( +pointer(float_array), +integer, +integer,
%% Train in streaming mode on the given dataset. %% Train in streaming mode on the given dataset.
%% This takes one pass. Be sure that initAndBuildModel/14 has been called first! %% This takes one pass. Be sure that initAndBuildModel/14 has been called first!
%% %%
train(DataList, DataRows, LabelsList, BatchTraining) :- hoeffding_tree_train(DataList, DataRows, LabelsList, BatchTraining) :-
convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)), convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)),
convert_list_to_float_array(LabelsList, array(Ysize, Y)), convert_list_to_float_array(LabelsList, array(Ysize, Y)),
trainI(X, Xsize, Xrownum, Y, Ysize, BatchTraining). trainI(X, Xsize, Xrownum, Y, Ysize, BatchTraining).
......
...@@ -9,106 +9,106 @@ ...@@ -9,106 +9,106 @@
reset_Model :- reset_Model :-
open('src/data_csv/iris2.csv', read, File), open('src/data_csv/iris2.csv', read, File),
take_csv_row(File, skipFirstRow,10, Data), take_csv_row(File, skipFirstRow,10, Data),
initAndBuildModel(gini_binary, Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0, 0.95, 5000, 100, 100, 10, 100). hoeffding_tree_initAndBuildModel(gini_binary, Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0, 0.95, 5000, 100, 100, 10, 100).
%% %%
%% TESTING predicate initAndBuildModel/12 %% TESTING predicate hoeffding_tree_initAndBuildModel/12
%% %%
:- begin_tests(initAndBuildModel). :- begin_tests(hoeffding_tree_initAndBuildModel).
%% Failure Tests %% Failure Tests
test(hoeffding_Init_Classify_Befor_Init, [error(_,system_error('The model is not initialized!'))]) :- test(hoeffding_Init_Classify_Befor_Init, [error(_,system_error('The model is not initialized!'))]) :-
classify([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, _, _). hoeffding_tree_classify([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, _, _).
test(hoeffding_Init_Wrong_TreeType, [error(domain_error('The given TreeType is unkown!' , wrongType), _)]) :- test(hoeffding_Init_Wrong_TreeType, [error(domain_error('The given TreeType is unkown!' , wrongType), _)]) :-
initAndBuildModel(wrongType, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, 5000, 100, 100, 10, 100). hoeffding_tree_initAndBuildModel(wrongType, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, 5000, 100, 100, 10, 100).
test(hoeffding_Init_Negative_NumClass, fail) :- test(hoeffding_Init_Negative_NumClass, fail) :-
initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], -2, 0, 0.95, 5000, 100, 100, 10, 100). hoeffding_tree_initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], -2, 0, 0.95, 5000, 100, 100, 10, 100).
test(hoeffding_Init_Bad_SuccessProbability, fail) :- test(hoeffding_Init_Bad_SuccessProbability, fail) :-
initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, -1.0, 5000, 100, 100, 10, 100), hoeffding_tree_initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, -1.0, 5000, 100, 100, 10, 100),
initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 2.0, 5000, 100, 100, 10, 100). hoeffding_tree_initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 2.0, 5000, 100, 100, 10, 100).
test(hoeffding_Init_Negative_MaxSamples, fail) :- test(hoeffding_Init_Negative_MaxSamples, fail) :-
initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, -5000, 100, 100, 10, 100). hoeffding_tree_initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, -5000, 100, 100, 10, 100).
test(hoeffding_Init_Negative_CheckInterval, fail) :- test(hoeffding_Init_Negative_CheckInterval, fail) :-
initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, 5000, -100, 100, 10, 100). hoeffding_tree_initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, 5000, -100, 100, 10, 100).
test(hoeffding_Init_Negative_MinSamples, fail) :- test(hoeffding_Init_Negative_MinSamples, fail) :-
initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, 5000, 100, -100, 10, 100). hoeffding_tree_initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, 5000, 100, -100, 10, 100).
test(hoeffding_Init_Negative_Bins, fail) :- test(hoeffding_Init_Negative_Bins, fail) :-
initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, 5000, 100, 100, -10, 100). hoeffding_tree_initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, 5000, 100, 100, -10, 100).
%% Failure test: a negative ObservationsBeforeBinning parameter must be rejected.
test(hoeffding_Init_Negative_ObservationsBeforeBinning, fail) :-
    hoeffding_tree_initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, 5000, 100, 100, 10, -100).

%% Failure test: a label vector shorter than the number of data points raises an error.
test(hoeffding_Init_With_Wrong_Label_Dims1, [error(_,system_error('Labels Vector is too short or its values are incorrect: should fit into [0,numClasses)!'))]) :-
    hoeffding_tree_initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], 2, 0, 0.95, 5000, 100, 100, 10, 100).

%% If the label vector is too long it seems to cause no problems
test(hoeffding_Init_With_Wrong_Label_Dims2) :-
    hoeffding_tree_initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1,0,0,1], 2, 0, 0.95, 5000, 100, 100, 10, 100).

%% The same when the label values are out of range
test(hoeffding_Init_With_Wrong_Label_Value) :-
    hoeffding_tree_initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,-1,0,-1], 2, 0, 0.95, 5000, 100, 100, 10, 100).

%% Failure test: label values >= numClasses are rejected.
test(hoeffding_Init_With_Too_Many_Label_Value, [error(_,system_error('The values of the Label have to start at 0 and be >= 0 and < the given numClass!'))]) :-
    hoeffding_tree_initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [1,1,0,2], 2, 0, 0.95, 5000, 100, 100, 10, 100).

%% Successful Tests
test(hoeffding_Init_GiniHoeffding_Direkt_Input) :-
    hoeffding_tree_initAndBuildModel(gini_hoeffding, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, 5000, 100, 100, 10, 100).

test(hoeffding_Init_GiniHoeffding_CSV_Input) :-
    open('src/data_csv/iris2.csv', read, File),
    take_csv_row(File, skipFirstRow,10, Data),
    hoeffding_tree_initAndBuildModel(gini_hoeffding, Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0, 0.95, 5000, 100, 100, 10, 100).

test(hoeffding_Init_GiniBinary_Direkt_Input) :-
    hoeffding_tree_initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, 5000, 100, 100, 10, 100).

test(hoeffding_Init_GiniBinary_CSV_Input) :-
    open('src/data_csv/iris2.csv', read, File),
    take_csv_row(File, skipFirstRow,10, Data),
    hoeffding_tree_initAndBuildModel(gini_binary, Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0, 0.95, 5000, 100, 100, 10, 100).

test(hoeffding_Init_InfoHoeffding_Direkt_Input) :-
    hoeffding_tree_initAndBuildModel(info_hoeffding, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, 5000, 100, 100, 10, 100).

test(hoeffding_Init_InfoHoeffding_CSV_Input) :-
    open('src/data_csv/iris2.csv', read, File),
    take_csv_row(File, skipFirstRow,10, Data),
    hoeffding_tree_initAndBuildModel(info_hoeffding, Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0, 0.95, 5000, 100, 100, 10, 100).

test(hoeffding_Init_InfoBinary_Direkt_Input) :-
    hoeffding_tree_initAndBuildModel(info_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, 5000, 100, 100, 10, 100).

test(hoeffding_Init_InfoBinary_CSV_Input) :-
    open('src/data_csv/iris2.csv', read, File),
    take_csv_row(File, skipFirstRow,10, Data),
    hoeffding_tree_initAndBuildModel(info_binary, Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0, 0.95, 5000, 100, 100, 10, 100).

:- end_tests(hoeffding_tree_initAndBuildModel).
%%
%% TESTING predicate hoeffding_tree_classify/4
%%
:- begin_tests(hoeffding_tree_classify).
%% Failure Tests %% Failure Tests
test(hoeffding_Classify_Different_Dims_To_Train, [error(_,system_error('Labels Vector is too short or its values are incorrect: should fit into [0,numClasses)!'))]) :- test(hoeffding_Classify_Different_Dims_To_Train, [error(_,system_error('Labels Vector is too short or its values are incorrect: should fit into [0,numClasses)!'))]) :-
reset_Model, reset_Model,
classify([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, PredicList, ProbsList), hoeffding_tree_classify([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, PredicList, ProbsList),
print('\nPredictions: '), print('\nPredictions: '),
print(PredicList), print(PredicList),
print('\nProbabilities: '), print('\nProbabilities: '),
...@@ -120,8 +120,8 @@ test(hoeffding_Classify_Different_Dims_To_Train, [error(_,system_error('Labels V ...@@ -120,8 +120,8 @@ test(hoeffding_Classify_Different_Dims_To_Train, [error(_,system_error('Labels V
test(hoeffding_Classify_GiniHoeffding) :- test(hoeffding_Classify_GiniHoeffding) :-
open('src/data_csv/iris2.csv', read, File), open('src/data_csv/iris2.csv', read, File),
take_csv_row(File, skipFirstRow,10, Data), take_csv_row(File, skipFirstRow,10, Data),
initAndBuildModel(gini_hoeffding, Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0, 0.95, 5000, 100, 100, 10, 100), hoeffding_tree_initAndBuildModel(gini_hoeffding, Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0, 0.95, 5000, 100, 100, 10, 100),
classify([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, PredicList, ProbsList), hoeffding_tree_classify([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, PredicList, ProbsList),
print('\nPredictions: '), print('\nPredictions: '),
print(PredicList), print(PredicList),
print('\nProbabilities: '), print('\nProbabilities: '),
...@@ -130,8 +130,8 @@ test(hoeffding_Classify_GiniHoeffding) :- ...@@ -130,8 +130,8 @@ test(hoeffding_Classify_GiniHoeffding) :-
test(hoeffding_Classify_GiniBinary) :- test(hoeffding_Classify_GiniBinary) :-
open('src/data_csv/iris2.csv', read, File), open('src/data_csv/iris2.csv', read, File),
take_csv_row(File, skipFirstRow,10, Data), take_csv_row(File, skipFirstRow,10, Data),
initAndBuildModel(gini_binary, Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0, 0.95, 5000, 100, 100, 10, 100), hoeffding_tree_initAndBuildModel(gini_binary, Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0, 0.95, 5000, 100, 100, 10, 100),
classify([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, PredicList, ProbsList), hoeffding_tree_classify([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, PredicList, ProbsList),
print('\nPredictions: '), print('\nPredictions: '),
print(PredicList), print(PredicList),
print('\nProbabilities: '), print('\nProbabilities: '),
...@@ -140,8 +140,8 @@ test(hoeffding_Classify_GiniBinary) :- ...@@ -140,8 +140,8 @@ test(hoeffding_Classify_GiniBinary) :-
test(hoeffding_Classify_InfoHoeffding) :- test(hoeffding_Classify_InfoHoeffding) :-
open('src/data_csv/iris2.csv', read, File), open('src/data_csv/iris2.csv', read, File),
take_csv_row(File, skipFirstRow,10, Data), take_csv_row(File, skipFirstRow,10, Data),
initAndBuildModel(info_hoeffding, Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0, 0.95, 5000, 100, 100, 10, 100), hoeffding_tree_initAndBuildModel(info_hoeffding, Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0, 0.95, 5000, 100, 100, 10, 100),
classify([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, PredicList, ProbsList), hoeffding_tree_classify([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, PredicList, ProbsList),
print('\nPredictions: '), print('\nPredictions: '),
print(PredicList), print(PredicList),
print('\nProbabilities: '), print('\nProbabilities: '),
...@@ -150,55 +150,55 @@ test(hoeffding_Classify_InfoHoeffding) :- ...@@ -150,55 +150,55 @@ test(hoeffding_Classify_InfoHoeffding) :-
%% Successful test: train with the info_binary split criterion on CSV data,
%% then classify a query set and print predictions and class probabilities.
test(hoeffding_Classify_InfoBinary) :-
    open('src/data_csv/iris2.csv', read, File),
    take_csv_row(File, skipFirstRow,10, Data),
    hoeffding_tree_initAndBuildModel(info_binary, Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0, 0.95, 5000, 100, 100, 10, 100),
    hoeffding_tree_classify([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, PredicList, ProbsList),
    print('\nPredictions: '),
    print(PredicList),
    print('\nProbabilities: '),
    print(ProbsList).

:- end_tests(hoeffding_tree_classify).
%%
%% TESTING predicate hoeffding_tree_train/4
%%
:- begin_tests(hoeffding_tree_train).

%% Failure test: a label vector shorter than the number of points raises an error.
test(hoeffding_Train_With_Wrong_Label_Dims1, [error(_,system_error('Labels Vector is too short or its values are incorrect: should fit into [0,numClasses)!'))]) :-
    reset_Model,
    hoeffding_tree_train([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, [0,1,0,0,0], 0).

%% If the label vector is too long it seems to cause no problems
test(hoeffding_Train_With_Wrong_Label_Dims2) :-
    reset_Model,
    hoeffding_tree_train([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, [0,1,0,1,0,0,1], 0).

%% The same when the label values are out of range
test(hoeffding_Train_With_Wrong_Label_Value) :-
    reset_Model,
    hoeffding_tree_train([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, [0,-1,0,-1], 0).

%% Failure test: label values >= numClasses are rejected.
test(hoeffding_Train_With_Too_Many_Label_Value, [error(_,system_error('The values of the Label have to start at 0 and be >= 0 and < the given numClass!'))]) :-
    reset_Model,
    hoeffding_tree_train([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, [1,1,0,2], 0).

test(hoeffding_Train_Bad_Data_Dims) :-
    reset_Model,
    hoeffding_tree_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 0).

%% Successful Tests
test(testDescription3) :-
    reset_Model,
    hoeffding_tree_train([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, [0,0,1,1,0], 1),
    hoeffding_tree_train([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, [0,0,1,1,0], 0).

:- end_tests(hoeffding_tree_train).
%% Convenience entry point for running the Hoeffding-tree test suites.
%% NOTE(review): only the hoeffding_tree_train suite is invoked here; the
%% hoeffding_tree_initAndBuildModel and hoeffding_tree_classify suites are
%% never run by this predicate -- confirm whether that is intentional.
run_hoeffding_tree_tests :-
    run_tests(hoeffding_tree_train).
%% Public interface of the KDE (Kernel Density Estimation) binding.
:- module(kde, [ kde_initAndBuildModel/13,
                 kde_evaluateWithQuery/3,
                 kde_evaluateNoQuery/1]).
%% requirements of library(struct) %% requirements of library(struct)
:- load_files(library(str_decl), :- load_files(library(str_decl),
...@@ -39,7 +39,7 @@ ...@@ -39,7 +39,7 @@
%% --Description-- %% --Description--
%% Build the KDE model with the given parameters and then trains it with the given reference data. %% Build the KDE model with the given parameters and then trains it with the given reference data.
%% %%
initAndBuildModel(Bandwidth, RelError, AbsError, KernelType, TreeType, Algorithm, MonteCarlo, McProb, InitialSampleSize, MCEntryCoef, MCBreakCoef, DataList, DataRows) :- kde_initAndBuildModel(Bandwidth, RelError, AbsError, KernelType, TreeType, Algorithm, MonteCarlo, McProb, InitialSampleSize, MCEntryCoef, MCBreakCoef, DataList, DataRows) :-
Bandwidth > 0.0, Bandwidth > 0.0,
RelError >= 0.0, RelError =< 1.0, RelError >= 0.0, RelError =< 1.0,
AbsError >= 0.0, AbsError >= 0.0,
...@@ -67,7 +67,7 @@ foreign(initAndBuildModel, c, initAndBuildModelI(+float32, +float32, +float32, ...@@ -67,7 +67,7 @@ foreign(initAndBuildModel, c, initAndBuildModelI(+float32, +float32, +float32,
%% --Description--
%% kde_initAndBuildModel/13 has to be called before.
%%
%% kde_evaluateWithQuery(+QueryList, +QueryRows, -EstimationList)
%%
%% Evaluates the previously built KDE model on the given query points
%% (flat float list plus its row count) and unifies EstimationList with
%% the resulting density estimations.
%% kde_initAndBuildModel/13 has to be called before.
kde_evaluateWithQuery(QueryList, QueryRows, EstimationList) :-
    convert_list_to_float_array(QueryList, QueryRows, array(Xsize, Xrownum, X)),
    evaluateWithQueryI(X, Xsize, Xrownum, Y, Ysize),
    convert_float_array_to_list(Y, Ysize, EstimationList).
...@@ -87,7 +87,7 @@ foreign(evaluateWithQuery, c, evaluateWithQueryI(+pointer(float_array), +integer ...@@ -87,7 +87,7 @@ foreign(evaluateWithQuery, c, evaluateWithQueryI(+pointer(float_array), +integer
%% If possible, it returns normalized estimations.
%% kde_initAndBuildModel/13 has to be called before.
%%
%% kde_evaluateNoQuery(-EstimationList)
%%
%% Evaluates the KDE model on its own reference set (no separate query
%% set) and unifies EstimationList with the density estimations.
%% kde_initAndBuildModel/13 has to be called before.
kde_evaluateNoQuery(EstimationList) :-
    evaluateNoQueryI(Y, Ysize),
    convert_float_array_to_list(Y, Ysize, EstimationList).
......
This diff is collapsed.
...@@ -27,12 +27,12 @@ ...@@ -27,12 +27,12 @@
%% float32 bandwidth needed by gaussian, epanechnikov, laplacian,
%% float32 scale needed by hyptan,
%% mat data,
%% int newDimension
%%
%% --Output--
%% mat transformedData,
%% vec eigenValues,
%% mat eigenVectores
%%
%% --Description--
%% This program performs Kernel Principal Components Analysis (KPCA) on the specified dataset with the specified kernel. This will transform the data onto the kernel principal components, and optionally reduce the dimensionality by ignoring the kernel principal components with the smallest eigenvalues.
......
%% Public interface of the KFN (k-furthest-neighbor search) binding.
:- module(kfn, [ kfn_initAndBuildModel/7,
                 kfn_searchWithQuery/7,
                 kfn_searchNoQuery/5]).
%% requirements of library(struct) %% requirements of library(struct)
:- load_files(library(str_decl), :- load_files(library(str_decl),
...@@ -33,7 +33,7 @@ ...@@ -33,7 +33,7 @@
%% --Description-- %% --Description--
%% Initialize the Model and build it. %% Initialize the Model and build it.
%% %%
initAndBuildModel(TreeType, SearchMode, RandomBasis, LeafSize, Epsilon, ReferenceList, ReferenceRows) :- kfn_initAndBuildModel(TreeType, SearchMode, RandomBasis, LeafSize, Epsilon, ReferenceList, ReferenceRows) :-
LeafSize >= 1, LeafSize >= 1,
Epsilon >= 0, Epsilon >= 0,
convert_list_to_float_array(ReferenceList, ReferenceRows, array(Xsize, Xrownum, X)), convert_list_to_float_array(ReferenceList, ReferenceRows, array(Xsize, Xrownum, X)),
...@@ -56,7 +56,7 @@ foreign(initAndBuildModel, c, initAndBuildModelI(+string, +string, ...@@ -56,7 +56,7 @@ foreign(initAndBuildModel, c, initAndBuildModelI(+string, +string,
%% --Description-- %% --Description--
%% Perform neighbor search on the queryset. %% Perform neighbor search on the queryset.
%% %%
searchWithQuery(QueryList, QueryRows, K, NeighborsList, YCols, DistancesList, ZCols) :- kfn_searchWithQuery(QueryList, QueryRows, K, NeighborsList, YCols, DistancesList, ZCols) :-
K > 0, K > 0,
convert_list_to_float_array(QueryList, QueryRows, array(Xsize, Xrownum, X)), convert_list_to_float_array(QueryList, QueryRows, array(Xsize, Xrownum, X)),
searchWithQueryI(X, Xsize, Xrownum, K, Y, YCols, YRows, Z, ZCols, ZRows), searchWithQueryI(X, Xsize, Xrownum, K, Y, YCols, YRows, Z, ZCols, ZRows),
...@@ -80,7 +80,7 @@ foreign(searchWithQuery, c, searchWithQueryI( +pointer(float_array), +integer, ...@@ -80,7 +80,7 @@ foreign(searchWithQuery, c, searchWithQueryI( +pointer(float_array), +integer,
%% --Description-- %% --Description--
%% Perform monochromatic neighbor search. %% Perform monochromatic neighbor search.
%% %%
searchNoQuery(K, NeighborsList, YCols, DistancesList, ZCols) :- kfn_searchNoQuery(K, NeighborsList, YCols, DistancesList, ZCols) :-
K > 0, K > 0,
searchNoQueryI(K, Y, YCols, YRows, Z, ZCols, ZRows), searchNoQueryI(K, Y, YCols, YRows, Z, ZCols, ZRows),
convert_float_array_to_2d_list(Y, YCols, YRows, NeighborsList), convert_float_array_to_2d_list(Y, YCols, YRows, NeighborsList),
......
...@@ -7,149 +7,149 @@ ...@@ -7,149 +7,149 @@
:- use_module('../../helper_files/helper.pl').
%% Re-initializes the kfn model to a fixed default configuration
%% (kd-tree, dual_tree search, leaf size 20, epsilon 0.0) so every
%% test starts from the same known state.
reset_Model :-
    kfn_initAndBuildModel(kd, dual_tree, 0, 20, 0.0, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
%%
%% TESTING predicate kfn_initAndBuildModel/7
%% (the stale "/9" arity in the old comment did not match the exported
%% kfn_initAndBuildModel/7)
%%
:- begin_tests(kfn_initAndBuildModel).
%% Failure Tests
test(kfn_InitAndBuildModel_Wrong_TreeType_Input, [error(domain_error('The given TreeType is unknown!' , wrongInput), _)]) :-
    kfn_initAndBuildModel(wrongInput, dual_tree, 0, 20, 0.0, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).

test(kfn_InitAndBuildModel_Wrong_SearchMode_Input, [error(domain_error('The given SearchMode is unknown!' , wrongInput), _)]) :-
    kfn_initAndBuildModel(kd, wrongInput, 0, 20, 0.0, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).

test(kfn_InitAndBuildModel_Negative_LeafSize, fail) :-
    kfn_initAndBuildModel(kd, dual_tree, 0, 0, 0.0, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).

test(kfn_InitAndBuildModel_Negative_Epsilon, fail) :-
    kfn_initAndBuildModel(kd, dual_tree, 0, 20, -1.0, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).

%% Successful Tests: every supported tree type combined with every search mode.
test(kfn_InitAndBuildModel_KD) :-
    kfn_initAndBuildModel(kd, naive, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
    kfn_initAndBuildModel(kd, single_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
    kfn_initAndBuildModel(kd, dual_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
    kfn_initAndBuildModel(kd, greedy, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).

test(kfn_InitAndBuildModel_VP) :-
    kfn_initAndBuildModel(vp, naive, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
    kfn_initAndBuildModel(vp, single_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
    kfn_initAndBuildModel(vp, dual_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
    kfn_initAndBuildModel(vp, greedy, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).

test(kfn_InitAndBuildModel_RP) :-
    kfn_initAndBuildModel(rp, naive, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
    kfn_initAndBuildModel(rp, single_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
    kfn_initAndBuildModel(rp, dual_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
    kfn_initAndBuildModel(rp, greedy, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).

test(kfn_InitAndBuildModel_MAX_RP) :-
    kfn_initAndBuildModel(max_rp, naive, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
    kfn_initAndBuildModel(max_rp, single_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
    kfn_initAndBuildModel(max_rp, dual_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
    kfn_initAndBuildModel(max_rp, greedy, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).

test(kfn_InitAndBuildModel_UB) :-
    kfn_initAndBuildModel(ub, naive, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
    kfn_initAndBuildModel(ub, single_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
    kfn_initAndBuildModel(ub, dual_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
    kfn_initAndBuildModel(ub, greedy, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).

test(kfn_InitAndBuildModel_COVER) :-
    kfn_initAndBuildModel(cover, naive, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
    kfn_initAndBuildModel(cover, single_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
    kfn_initAndBuildModel(cover, dual_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
    kfn_initAndBuildModel(cover, greedy, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).

test(kfn_InitAndBuildModel_R) :-
    kfn_initAndBuildModel(r, naive, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
    kfn_initAndBuildModel(r, single_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
    kfn_initAndBuildModel(r, dual_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
    kfn_initAndBuildModel(r, greedy, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).

test(kfn_InitAndBuildModel_R_STAR) :-
    kfn_initAndBuildModel(r_star, naive, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
    kfn_initAndBuildModel(r_star, single_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
    kfn_initAndBuildModel(r_star, dual_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
    kfn_initAndBuildModel(r_star, greedy, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).

test(kfn_InitAndBuildModel_X) :-
    kfn_initAndBuildModel(x, naive, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
    kfn_initAndBuildModel(x, single_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
    kfn_initAndBuildModel(x, dual_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
    kfn_initAndBuildModel(x, greedy, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).

test(kfn_InitAndBuildModel_BALL) :-
    kfn_initAndBuildModel(ball, naive, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
    kfn_initAndBuildModel(ball, single_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
    kfn_initAndBuildModel(ball, dual_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
    kfn_initAndBuildModel(ball, greedy, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).

test(kfn_InitAndBuildModel_Hilbert_R) :-
    kfn_initAndBuildModel(hilbert_r, naive, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
    kfn_initAndBuildModel(hilbert_r, single_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
    kfn_initAndBuildModel(hilbert_r, dual_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
    kfn_initAndBuildModel(hilbert_r, greedy, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).

test(kfn_InitAndBuildModel_R_Plus) :-
    kfn_initAndBuildModel(r_plus, naive, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
    kfn_initAndBuildModel(r_plus, single_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
    kfn_initAndBuildModel(r_plus, dual_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
    kfn_initAndBuildModel(r_plus, greedy, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).

test(kfn_InitAndBuildModel_R_Plus_Plus) :-
    kfn_initAndBuildModel(r_plus_plus, naive, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
    kfn_initAndBuildModel(r_plus_plus, single_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
    kfn_initAndBuildModel(r_plus_plus, dual_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
    kfn_initAndBuildModel(r_plus_plus, greedy, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).

test(kfn_InitAndBuildModel_OCT) :-
    kfn_initAndBuildModel(oct, naive, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
    kfn_initAndBuildModel(oct, single_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
    kfn_initAndBuildModel(oct, dual_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
    kfn_initAndBuildModel(oct, greedy, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).

test(kfn_InitAndBuildModel_CSV_Input) :-
    reset_Model,
    open('src/data_csv/iris2.csv', read, File),
    take_csv_row(File, skipFirstRow,10, Data),
    kfn_initAndBuildModel(kd, dual_tree, 1, 20, 0.005, Data, 4).

:- end_tests(kfn_initAndBuildModel).
%%
%% TESTING predicate kfn_searchWithQuery/7
%%
:- begin_tests(kfn_searchWithQuery).

%% Failure Tests

%% A query set whose dimensionality differs from the reference set must raise.
test(kfn_SearchWithQuery_Wrong_Query_Dims, [error(_,system_error('Queryset has Dim(4) but the Referenceset has Dim(3)'))]) :-
        reset_Model,
        kfn_searchWithQuery([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, 2, _, _, _, _).

%% A non-positive K must make the predicate fail.
test(kfn_SearchWithQuery_Negative_K, fail) :-
        reset_Model,
        kfn_searchWithQuery([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, -1, _, _, _, _).

%% K larger than the number of reference points must raise.
test(kfn_SearchWithQuery_Too_Large_K, [error(_,system_error('Requested value of k (10) is greater than the number of points in the reference set (4)'))]) :-
        reset_Model,
        kfn_searchWithQuery([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, 10, _, _, _, _).
%% Successful Tests %% Successful Tests
test(kfn_SearchWithQuery_Normal) :- test(kfn_SearchWithQuery_Normal) :-
reset_Model, reset_Model,
searchWithQuery([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, 2, NeighborsList, _, DistancesList, _), kfn_searchWithQuery([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, 2, NeighborsList, _, DistancesList, _),
print('\nNeighbors: '), print('\nNeighbors: '),
print(NeighborsList), print(NeighborsList),
print('\nDistances: '), print('\nDistances: '),
...@@ -158,38 +158,38 @@ test(kfn_SearchWithQuery_Normal) :- ...@@ -158,38 +158,38 @@ test(kfn_SearchWithQuery_Normal) :-
%% Build the model from CSV data and run a query search, printing the results.
test(kfn_SearchWithQuery_CSV_Input) :-
        open('src/data_csv/iris2.csv', read, File),
        take_csv_row(File, skipFirstRow,10, Data),
        kfn_initAndBuildModel(kd, dual_tree, 0, 20, 0.0, Data, 4),
        kfn_searchWithQuery([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, 7, NeighborsList, _, DistancesList, _),
        print('\nNeighbors: '),
        print(NeighborsList),
        print('\nDistances: '),
        print(DistancesList).

:- end_tests(kfn_searchWithQuery).
%%
%% TESTING predicate kfn_searchNoQuery/5
%%
:- begin_tests(kfn_searchNoQuery).

%% Failure Tests

%% A non-positive K must make the predicate fail.
test(kfn_SearchNoQuery_Negative_K, fail) :-
        reset_Model,
        kfn_searchNoQuery(-1, _, _, _, _).

%% K larger than the number of reference points must raise.
test(kfn_SearchNoQuery_Too_Large_K, [error(_,system_error('Requested value of k (15) is greater than the number of points in the reference set (4)'))]) :-
        reset_Model,
        kfn_searchNoQuery(15, _, _, _, _).
%% Successful Tests %% Successful Tests
test(kfn_SearchNoQuery_Normal) :- test(kfn_SearchNoQuery_Normal) :-
reset_Model, reset_Model,
searchNoQuery(2, NeighborsList, _, DistancesList, _), kfn_searchNoQuery(2, NeighborsList, _, DistancesList, _),
print('\nNeighbors: '), print('\nNeighbors: '),
print(NeighborsList), print(NeighborsList),
print('\nDistances: '), print('\nDistances: '),
...@@ -198,14 +198,14 @@ test(kfn_SearchNoQuery_Normal) :- ...@@ -198,14 +198,14 @@ test(kfn_SearchNoQuery_Normal) :-
%% Build the model from CSV data and run a monochromatic (no-query) search.
test(kfn_SearchNoQuery_CSV_Input) :-
        open('src/data_csv/iris2.csv', read, File),
        take_csv_row(File, skipFirstRow,10, Data),
        kfn_initAndBuildModel(kd, dual_tree, 0, 20, 0.0, Data, 4),
        kfn_searchNoQuery(7, NeighborsList, _, DistancesList, _),
        print('\nNeighbors: '),
        print(NeighborsList),
        print('\nDistances: '),
        print(DistancesList).

:- end_tests(kfn_searchNoQuery).

%% Entry point: run every plunit suite defined in this file.
run_kfn_tests :-
        run_tests.
......
%% Public interface of the k-nearest-neighbors (KNN) binding.
:- module(knn, [ knn_initAndBuildModel/9,
                 knn_searchWithQuery/7,
                 knn_searchNoQuery/5]).
%% requirements of library(struct) %% requirements of library(struct)
:- load_files(library(str_decl), :- load_files(library(str_decl),
...@@ -34,7 +34,7 @@ ...@@ -34,7 +34,7 @@
%% --Description-- %% --Description--
%% Initialize the Model and build it. %% Initialize the Model and build it.
%% %%
initAndBuildModel(TreeType, SearchMode, RandomBasis, LeafSize, Tau, Rho, Epsilon, ReferenceList, ReferenceRows) :- knn_initAndBuildModel(TreeType, SearchMode, RandomBasis, LeafSize, Tau, Rho, Epsilon, ReferenceList, ReferenceRows) :-
LeafSize >= 1, LeafSize >= 1,
Tau >= 0, Tau >= 0,
Rho >= 0, Rho >= 0,
...@@ -60,7 +60,7 @@ foreign(initAndBuildModel, c, initAndBuildModelI( +string, +string, ...@@ -60,7 +60,7 @@ foreign(initAndBuildModel, c, initAndBuildModelI( +string, +string,
%% --Description-- %% --Description--
%% Perform neighbor search on the queryset. %% Perform neighbor search on the queryset.
%% %%
searchWithQuery(QueryList, QueryRows, K, NeighborsList, YCols, DistancesList, ZCols) :- knn_searchWithQuery(QueryList, QueryRows, K, NeighborsList, YCols, DistancesList, ZCols) :-
K > 0, K > 0,
convert_list_to_float_array(QueryList, QueryRows, array(Xsize, Xrownum, X)), convert_list_to_float_array(QueryList, QueryRows, array(Xsize, Xrownum, X)),
searchWithQueryI(X, Xsize, Xrownum, K, Y, YCols, YRows, Z, ZCols, ZRows), searchWithQueryI(X, Xsize, Xrownum, K, Y, YCols, YRows, Z, ZCols, ZRows),
...@@ -83,7 +83,7 @@ foreign(searchWithQuery, c, searchWithQueryI( +pointer(float_array), +integer, ...@@ -83,7 +83,7 @@ foreign(searchWithQuery, c, searchWithQueryI( +pointer(float_array), +integer,
%% --Description-- %% --Description--
%% Perform monochromatic neighbor search. %% Perform monochromatic neighbor search.
%% %%
searchNoQuery(K, NeighborsList, YCols, DistancesList, ZCols) :- knn_searchNoQuery(K, NeighborsList, YCols, DistancesList, ZCols) :-
K > 0, K > 0,
searchNoQueryI(K, Y, YCols, YRows, Z, ZCols, ZRows), searchNoQueryI(K, Y, YCols, YRows, Z, ZCols, ZRows),
convert_float_array_to_2d_list(Y, YCols, YRows, NeighborsList), convert_float_array_to_2d_list(Y, YCols, YRows, NeighborsList),
......
This diff is collapsed.
%% Public interface of the LARS (Least Angle Regression) binding.
:- module(lars, [ lars_initModelNoDataNoGram/4,
                  lars_initModelNoDataWithGram/6,
                  lars_initModelWithDataNoGram/8,
                  lars_initModelWithDataWithGram/10,
                  lars_activeSet/1,
                  lars_beta/1,
                  lars_betaPath/2,
                  lars_computeError/5,
                  lars_lambdaPath/1,
                  lars_matUtriCholFactor/2,
                  lars_predict/4,
                  lars_train/6]).
:- load_files(library(str_decl), :- load_files(library(str_decl),
[when(compile_time), if(changed)]). [when(compile_time), if(changed)]).
...@@ -38,7 +38,7 @@ ...@@ -38,7 +38,7 @@
%% --Description-- %% --Description--
%% Only initialize the LARS model. %% Only initialize the LARS model.
%% %%
%% Only initialize the LARS model (no data, no Gram matrix); delegates to the
%% foreign-interface predicate.
lars_initModelNoDataNoGram(UseCholesky, Lambda1, Lambda2, Tolerance) :-
        initModelNoDataNoGramI(UseCholesky, Lambda1, Lambda2, Tolerance).
foreign(initModelNoDataNoGram, c, initModelNoDataNoGramI(+integer, foreign(initModelNoDataNoGram, c, initModelNoDataNoGramI(+integer,
...@@ -58,7 +58,7 @@ foreign(initModelNoDataNoGram, c, initModelNoDataNoGramI(+integer, ...@@ -58,7 +58,7 @@ foreign(initModelNoDataNoGram, c, initModelNoDataNoGramI(+integer,
%% --Description-- %% --Description--
%% Initialize LARS model, and pass in a precalculated Gram matrix but dont train the model. %% Initialize LARS model, and pass in a precalculated Gram matrix but dont train the model.
%% %%
%% Initialize the LARS model with a precalculated Gram matrix, without
%% training it. The Gram list is flattened into a float array first.
lars_initModelNoDataWithGram(UseCholesky, GramList, GramRows, Lambda1, Lambda2, Tolerance) :-
        convert_list_to_float_array(GramList, GramRows, array(Zsize, Zrownum, Z)),
        initModelNoDataWithGramI(UseCholesky, Z, Zsize, Zrownum, Lambda1, Lambda2, Tolerance).
...@@ -81,7 +81,7 @@ foreign(initModelNoDataWithGram, c, initModelNoDataWithGramI( +integer, ...@@ -81,7 +81,7 @@ foreign(initModelNoDataWithGram, c, initModelNoDataWithGramI( +integer,
%% --Description-- %% --Description--
%% Initialize LARS model, and train the model. %% Initialize LARS model, and train the model.
%% %%
%% Initialize the LARS model with data and responses, and train it
%% (no precalculated Gram matrix).
lars_initModelWithDataNoGram(DataList, DataRows, ResponsesList, TransposeData, UseCholesky, Lambda1, Lambda2, Tolerance) :-
        convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)),
        convert_list_to_float_array(ResponsesList, array(Ysize, Y)),
        initModelWithDataNoGramI(X, Xsize, Xrownum, Y, Ysize, TransposeData, UseCholesky, Lambda1, Lambda2, Tolerance).
...@@ -109,7 +109,7 @@ foreign(initModelWithDataNoGram, c, initModelWithDataNoGramI( +pointer(float_a ...@@ -109,7 +109,7 @@ foreign(initModelWithDataNoGram, c, initModelWithDataNoGramI( +pointer(float_a
%% --Description-- %% --Description--
%% Initialize LARS model, pass in a precalculated Gram matrix and train the model. %% Initialize LARS model, pass in a precalculated Gram matrix and train the model.
%% %%
initModelWithDataWithGram(DataList, DataRows, ResponsesList, TransposeData, UseCholesky, GramList, GramRows, Lambda1, Lambda2, Tolerance) :- lars_initModelWithDataWithGram(DataList, DataRows, ResponsesList, TransposeData, UseCholesky, GramList, GramRows, Lambda1, Lambda2, Tolerance) :-
convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)), convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)),
convert_list_to_float_array(ResponsesList, array(Ysize, Y)), convert_list_to_float_array(ResponsesList, array(Ysize, Y)),
convert_list_to_float_array(GramList, GramRows, array(Zsize, Zrownum, Z)), convert_list_to_float_array(GramList, GramRows, array(Zsize, Zrownum, Z)),
...@@ -132,7 +132,7 @@ foreign(initModelWithDataWithGram, c, initModelWithDataWithGramI(+pointer(float_ ...@@ -132,7 +132,7 @@ foreign(initModelWithDataWithGram, c, initModelWithDataWithGramI(+pointer(float_
%% --Description-- %% --Description--
%% Get the set of active dimensions %% Get the set of active dimensions
%% %%
%% Get the set of active dimensions of the trained model as a list.
lars_activeSet(ActiveSetList) :-
        activeSetI(Y, Ysize),
        convert_float_array_to_list(Y, Ysize, ActiveSetList).
...@@ -147,7 +147,7 @@ foreign(activeSet, c, activeSetI(-pointer(float_array), -integer)). ...@@ -147,7 +147,7 @@ foreign(activeSet, c, activeSetI(-pointer(float_array), -integer)).
%% --Description-- %% --Description--
%% Get the solution coefficients. %% Get the solution coefficients.
%% %%
%% Get the solution coefficients of the trained model as a list.
lars_beta(BetaList) :-
        betaI(Y, Ysize),
        convert_float_array_to_list(Y, Ysize, BetaList).
...@@ -162,7 +162,7 @@ foreign(beta, c, betaI(-pointer(float_array), -integer)). ...@@ -162,7 +162,7 @@ foreign(beta, c, betaI(-pointer(float_array), -integer)).
%% --Description-- %% --Description--
%% Get the set of coefficients after each iteration. The solution is the last element. %% Get the set of coefficients after each iteration. The solution is the last element.
%% %%
%% Get the coefficients after each iteration as a 2D list; the solution is
%% the last element.
lars_betaPath(BetaList, XCols) :-
        betaPathI(X, XCols, XRows),
        convert_float_array_to_2d_list(X, XCols, XRows, BetaList).
...@@ -180,7 +180,7 @@ foreign(betaPath, c, betaPathI(-pointer(float_array), -integer, -integer)). ...@@ -180,7 +180,7 @@ foreign(betaPath, c, betaPathI(-pointer(float_array), -integer, -integer)).
%% --Description-- %% --Description--
%% Compute cost error of the given data matrix using the currently-trained LARS model.Only ||y-beta*X||2 is used to calculate cost error. %% Compute cost error of the given data matrix using the currently-trained LARS model.Only ||y-beta*X||2 is used to calculate cost error.
%% %%
%% Compute the cost error ||y - beta*X||^2 of the given data against the
%% currently-trained LARS model.
lars_computeError(DataList, DataRows, ResponsesList, RowMajor, Error) :-
        convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)),
        convert_list_to_float_array(ResponsesList, array(Ysize, Y)),
        computeErrorI(X, Xsize, Xrownum, Y, Ysize, RowMajor, Error).
...@@ -199,7 +199,7 @@ foreign(computeError, c, computeErrorI( +pointer(float_array), +integer, +intege ...@@ -199,7 +199,7 @@ foreign(computeError, c, computeErrorI( +pointer(float_array), +integer, +intege
%% --Description-- %% --Description--
%% Get the set of values for lambda1 after each iteration; the solution is the last element. %% Get the set of values for lambda1 after each iteration; the solution is the last element.
%% %%
%% Get the lambda1 value after each iteration as a list; the solution is
%% the last element.
lars_lambdaPath(LambdaPathList) :-
        lambdaPathI(Y, Ysize),
        convert_float_array_to_list(Y, Ysize, LambdaPathList).
...@@ -214,7 +214,7 @@ foreign(lambdaPath, c, lambdaPathI(-pointer(float_array), -integer)). ...@@ -214,7 +214,7 @@ foreign(lambdaPath, c, lambdaPathI(-pointer(float_array), -integer)).
%% --Description-- %% --Description--
%% Get the upper triangular cholesky factor. %% Get the upper triangular cholesky factor.
%% %%
%% Get the upper-triangular Cholesky factor as a 2D list.
lars_matUtriCholFactor(FactorList, XCols) :-
        matUtriCholFactorI(X, XCols, XRows),
        convert_float_array_to_2d_list(X, XCols, XRows, FactorList).
...@@ -231,7 +231,7 @@ foreign(matUtriCholFactor, c, matUtriCholFactorI(-pointer(float_array), -integer ...@@ -231,7 +231,7 @@ foreign(matUtriCholFactor, c, matUtriCholFactorI(-pointer(float_array), -integer
%% --Description-- %% --Description--
%% Predict y_i for each data point in the given data matrix using the currently-trained LARS model. %% Predict y_i for each data point in the given data matrix using the currently-trained LARS model.
%% %%
%% Predict y_i for each point in the given data using the currently-trained
%% LARS model; returns the predictions as a list.
lars_predict(PointsList, PointsRows, PredicList, RowMajor) :-
        convert_list_to_float_array(PointsList, PointsRows, array(Xsize, Xrownum, X)),
        predictI(X, Xsize, Xrownum, Y, Ysize, RowMajor),
        convert_float_array_to_list(Y, Ysize, PredicList).
...@@ -253,7 +253,7 @@ foreign(predict, c, predictI( +pointer(float_array), +integer, +integer, ...@@ -253,7 +253,7 @@ foreign(predict, c, predictI( +pointer(float_array), +integer, +integer,
%% --Description-- %% --Description--
%% Train the LARS model with the given data. %% Train the LARS model with the given data.
%% %%
train(DataList, DataRows, ResponsesList, BetaList, RowMajor, Error) :- lars_train(DataList, DataRows, ResponsesList, BetaList, RowMajor, Error) :-
convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)), convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)),
convert_list_to_float_array(ResponsesList, array(Ysize, Y)), convert_list_to_float_array(ResponsesList, array(Ysize, Y)),
trainI(X, Xsize, Xrownum, Y, Ysize, Z, Zsize, RowMajor, Error), trainI(X, Xsize, Xrownum, Y, Ysize, Z, Zsize, RowMajor, Error),
......
...@@ -9,10 +9,10 @@ ...@@ -9,10 +9,10 @@
:- use_module('../../helper_files/helper.pl'). :- use_module('../../helper_files/helper.pl').
%% Re-initialise the LARS model and train it on a small fixed data set so
%% each test starts from a known state.
%% NOTE(review): lars_trainNoBetaReturn/7 is not in the lars module export
%% list shown above — confirm it is exported or otherwise accessible here.
reset_Model :-
        lars_initModelNoDataNoGram(1, 0.1, 0.3, 0.001),
        convert_list_to_float_array([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5],3, array(Xsize, Xrownum, X)),
        convert_list_to_float_array([0.2,0.2,0.2,0.2], array(Ysize, Y)),
        lars_trainNoBetaReturn(X,Xsize, Xrownum,Y, Ysize, 1, _).
:- begin_tests(lists).

%% Train the model, check the returned training error is 0, then predict on
%% the training data and print the predictions.
%% NOTE(review): lars_predict is called here with 6 arguments while the lars
%% module exports lars_predict/4 — confirm which arity is intended.
test(train, [true(A =:= 0)]) :-
        reset_Model,
        convert_list_to_float_array([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5],3, array(Xsize, Xrownum, X)),
        convert_list_to_float_array([0.2,0.2,0.2,0.2], array(Ysize, Y)),
        lars_trainNoBetaReturn(X,Xsize, Xrownum,Y, Ysize, 1, A),
        lars_predict(X, Xsize,Xrownum, Predic, PredicSize,0),
        convert_float_array_to_list(Predic, PredicSize, Result),
        print(Result).
%% Print the active dimension set of the freshly-trained model.
%% NOTE(review): lars_activeSet is called here with 2 arguments while the
%% module exports lars_activeSet/1 — confirm which arity is intended.
test(activeSet) :-
        reset_Model,
        lars_activeSet(ActSet, ActSetSize),
        convert_float_array_to_list(ActSet, ActSetSize, Result),
        print(Result).
%% Print the upper-triangular Cholesky factor of the freshly-trained model.
%% NOTE(review): lars_matUtriCholFactor is called here with 3 arguments while
%% the module exports lars_matUtriCholFactor/2 — confirm the intended arity.
test(matUtriCholFactor) :-
        reset_Model,
        lars_matUtriCholFactor(Matrix, MatrixColNum, MatrixRowNum),
        print(MatrixColNum),
        convert_float_array_to_2d_list(Matrix, MatrixColNum, MatrixRowNum, Results),
        print(Results).
%% Print the beta path (coefficients per iteration) of the trained model.
%% NOTE(review): lars_betaPath is called here with 3 arguments while the
%% module exports lars_betaPath/2 — confirm the intended arity.
test(betaPath) :-
        reset_Model,
        lars_betaPath(Matrix, MatrixColNum, MatrixRowNum),
        print(MatrixColNum),
        convert_float_array_to_2d_list(Matrix, MatrixColNum, MatrixRowNum, Results),
        print(Results).
......
%% Public interface of the linear SVM binding.
:- module(linear_SVM, [ linear_SVM_initModelWithTrain/8,
                        linear_SVM_initModelNoTrain/4,
                        linear_SVM_classify/5,
                        linear_SVM_classifyPoint/2,
                        linear_SVM_computeAccuracy/4,
                        linear_SVM_train/6]).
%% requirements of library(struct) %% requirements of library(struct)
:- load_files(library(str_decl), :- load_files(library(str_decl),
...@@ -36,7 +36,7 @@ ...@@ -36,7 +36,7 @@
%% --Description-- %% --Description--
%% Initializes the linear_svm model with the given data and trains it. %% Initializes the linear_svm model with the given data and trains it.
%% %%
initModelWithTrain(DataList, DataRows, LabelsList, NumClasses, Lambda, Delta, FitIntercept, Optimizer) :- linear_SVM_initModelWithTrain(DataList, DataRows, LabelsList, NumClasses, Lambda, Delta, FitIntercept, Optimizer) :-
NumClasses >= 0, NumClasses >= 0,
Lambda >= 0.0, Lambda >= 0.0,
Delta >= 0.0, Delta >= 0.0,
...@@ -62,7 +62,7 @@ foreign(initModelWithTrain, c, initModelWithTrainI( +pointer(float_array), + ...@@ -62,7 +62,7 @@ foreign(initModelWithTrain, c, initModelWithTrainI( +pointer(float_array), +
%% --Description-- %% --Description--
%% Initializes the linear_svm model with the given data but doesnt train it. %% Initializes the linear_svm model with the given data but doesnt train it.
%% %%
initModelNoTrain(NumClasses, Lambda, Delta, FitIntercept) :- linear_SVM_initModelNoTrain(NumClasses, Lambda, Delta, FitIntercept) :-
NumClasses >= 0, NumClasses >= 0,
Lambda >= 0.0, Lambda >= 0.0,
Delta >= 0.0, Delta >= 0.0,
...@@ -82,7 +82,7 @@ foreign(initModelNoTrain, c, initModelNoTrainI( +integer, +float32, +float32, ...@@ -82,7 +82,7 @@ foreign(initModelNoTrain, c, initModelNoTrainI( +integer, +float32, +float32,
%% --Description-- %% --Description--
%% Classify the given points, returning class scores and predicted class label for each point. %% Classify the given points, returning class scores and predicted class label for each point.
%% %%
classify(DataList, DataRows, LabelsList, ScoresList, ZCols) :- linear_SVM_classify(DataList, DataRows, LabelsList, ScoresList, ZCols) :-
convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrows, X)), convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrows, X)),
classifyI(X, Xsize, Xrows, Y, Ysize, Z, ZCols, ZRows), classifyI(X, Xsize, Xrows, Y, Ysize, Z, ZCols, ZRows),
convert_float_array_to_list(Y, Ysize, LabelsList), convert_float_array_to_list(Y, Ysize, LabelsList),
...@@ -102,7 +102,7 @@ foreign(classify, c, classifyI( +pointer(float_array), +integer, +integer, ...@@ -102,7 +102,7 @@ foreign(classify, c, classifyI( +pointer(float_array), +integer, +integer,
%% --Description-- %% --Description--
%% Classify the given point. %% Classify the given point.
%% %%
%% Classify the given single point, returning the predicted class label.
linear_SVM_classifyPoint(DataList, Prediction) :-
        convert_list_to_float_array(DataList, array(Xsize, X)),
        classifyPointI(X, Xsize, Prediction).
...@@ -121,7 +121,7 @@ foreign(classifyPoint, c, classifyPointI( +pointer(float_array), +integer, ...@@ -121,7 +121,7 @@ foreign(classifyPoint, c, classifyPointI( +pointer(float_array), +integer,
%% Computes accuracy of the learned model given the feature data and the labels associated with each data point. %% Computes accuracy of the learned model given the feature data and the labels associated with each data point.
%% Predictions are made using the provided data and are compared with the actual labels. %% Predictions are made using the provided data and are compared with the actual labels.
%% %%
%% Compute the accuracy of the learned model on the given feature data:
%% predictions are made from the data and compared with the actual labels.
linear_SVM_computeAccuracy(DataList, DataRows, LabelsList, Accuracy) :-
        convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)),
        convert_list_to_float_array(LabelsList, array(Ysize, Y)),
        computeAccuracyI(X, Xsize, Xrownum, Y, Ysize, Accuracy).
...@@ -143,7 +143,7 @@ foreign(computeAccuracy, c, computeAccuracyI( +pointer(float_array), +integer, ...@@ -143,7 +143,7 @@ foreign(computeAccuracy, c, computeAccuracyI( +pointer(float_array), +integer,
%% --Description-- %% --Description--
%% Train the Linear_svm model with the given training data. %% Train the Linear_svm model with the given training data.
%% %%
train(DataList, DataRows, LabelsList, NumClasses, Optimizer, ObjValue) :- linear_SVM_train(DataList, DataRows, LabelsList, NumClasses, Optimizer, ObjValue) :-
NumClasses >= 0, NumClasses >= 0,
convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)), convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)),
convert_list_to_float_array(LabelsList, array(Ysize, Y)), convert_list_to_float_array(LabelsList, array(Ysize, Y)),
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment