diff --git a/src/methods/adaboost/adaboost.pl b/src/methods/adaboost/adaboost.pl
index f7416a91884d424d9a0e0db539a3b16aa047c61f..a12de42f5ab70abe06ef61d4e85eb6246953a928 100644
--- a/src/methods/adaboost/adaboost.pl
+++ b/src/methods/adaboost/adaboost.pl
@@ -1,11 +1,11 @@
 
-:- module(adaboost, [   initModelWithTraining/7,
-                        initModelNoTraining/2, 
-                        classify/5, 
-                        numClasses/1, 
-                        getTolerance/1, 
-                        modifyTolerance/1, 
-                        train/8]).
+:- module(adaboost, [   adaboost_initModelWithTraining/7,
+                        adaboost_initModelNoTraining/2, 
+                        adaboost_classify/5, 
+                        adaboost_numClasses/1, 
+                        adaboost_getTolerance/1, 
+                        adaboost_modifyTolerance/1, 
+                        adaboost_train/8]).
 
 %% requirements of library(struct)
 :- load_files(library(str_decl),
@@ -38,7 +38,7 @@
 %%
 %%            Initiates the Adaboostmodel and trains it, so classify can be used immediately.
 %%
-initModelWithTraining(MatList, MatRows, VecList, NumClasses, Learner, Iterations, Tolerance) :-
+adaboost_initModelWithTraining(MatList, MatRows, VecList, NumClasses, Learner, Iterations, Tolerance) :-
         NumClasses >= 0,
         Iterations >= 0,
         Tolerance > 0,
@@ -46,11 +46,11 @@ initModelWithTraining(MatList, MatRows, VecList, NumClasses, Learner, Iterations
         convert_list_to_float_array(VecList, array(Ysize, Y)),
         initModelWithTrainingI(X, Xsize, Xrownum, Y, Ysize, NumClasses, Learner, Iterations, Tolerance).
 
-foreign(initModelWithTraining, c, initModelWithTrainingI(+pointer(float_array), +integer, +integer, 
-                                                        +pointer(float_array), +integer, 
-                                                        +integer, 
-                                                        +string, 
-                                                        +integer , +float32)).
+foreign(initModelWithTraining, c, initModelWithTrainingI(       +pointer(float_array), +integer, +integer, 
+                                                                +pointer(float_array), +integer, 
+                                                                +integer, 
+                                                                +string, 
+                                                                +integer , +float32)).
 
 
 
@@ -64,7 +64,7 @@ foreign(initModelWithTraining, c, initModelWithTrainingI(+pointer(float_array),
 %%            Needs to be called first before all other predicates exept initModelWithTraining!
 %%            Initiates the Adaboostmodel but doesnt train it, so train has to be used first before classify can be used.
 %%
-initModelNoTraining(Tolerance, Learner) :-
+adaboost_initModelNoTraining(Tolerance, Learner) :-
         Tolerance > 0,
         initModelNoTrainingI(Tolerance, Learner).
 
@@ -81,7 +81,7 @@ foreign(initModelNoTraining, c, initModelNoTrainingI(+float32, +string)).
 %% --Description--
 %%            Classifies the given data into the number of classes the model was trained for.
 %%
-classify(TestList, TestRows, PredicList, ProbsList, ZRows) :-
+adaboost_classify(TestList, TestRows, PredicList, ProbsList, ZRows) :-
         convert_list_to_float_array(TestList, TestRows, array(Xsize, Xrownum, X)),
         classifyI(X, Xsize, Xrownum, Y, Ysize, Z, ZCols, ZRows),
         convert_float_array_to_list(Y, Ysize, PredicList),
@@ -102,7 +102,7 @@ foreign(classify, c, classifyI(  +pointer(float_array), +integer, +integer,
 %% --Description--
 %%            Returns the amount of classes defined in the model for classification.
 %%
-numClasses(ClassesNum) :-
+adaboost_numClasses(ClassesNum) :-
         numClassesI(ClassesNum).
 
 foreign(numClasses, c, numClassesI([-integer])).
@@ -116,7 +116,7 @@ foreign(numClasses, c, numClassesI([-integer])).
 %% --Description--
 %%            Returns the tolerance of the model.
 %%
-getTolerance(Tolerance) :-
+adaboost_getTolerance(Tolerance) :-
         getToleranceI(Tolerance).
 
 foreign(getTolerance, c, getToleranceI([-float32])).
@@ -130,7 +130,7 @@ foreign(getTolerance, c, getToleranceI([-float32])).
 %% --Description--
 %%            Modifies the tolerance of the model.
 %%
-modifyTolerance(NewTolerance) :-
+adaboost_modifyTolerance(NewTolerance) :-
         NewTolerance > 0,
         modifyToleranceI(NewTolerance).
 
@@ -149,7 +149,7 @@ foreign(modifyTolerance, c, modifyToleranceI(+float32)).
 %%            float  double upper bound training error
 %%
 %% --Description--
-train(MatList, MatRows, VecList, NumClasses, Learner, Iterations, Tolerance, Error) :-
+adaboost_train(MatList, MatRows, VecList, NumClasses, Learner, Iterations, Tolerance, Error) :-
         NumClasses >= 0,
         Iterations >= 0,
         Tolerance >= 0,
@@ -157,7 +157,7 @@ train(MatList, MatRows, VecList, NumClasses, Learner, Iterations, Tolerance, Err
         convert_list_to_float_array(VecList, array(Ysize, Y)),
         trainI(X, Xsize, Xrownum, Y, Ysize, NumClasses, Learner, Iterations, Tolerance, Error).
 
-foreign(train, c, trainI(    +pointer(float_array), +integer, +integer, 
+foreign(train, c, trainI(   +pointer(float_array), +integer, +integer, 
                             +pointer(float_array), +integer, 
                             +integer, 
                             +string, 
diff --git a/src/methods/adaboost/adaboost_test.pl b/src/methods/adaboost/adaboost_test.pl
index 1c1cae0c2c5105031f1d143a973ffa4be4a93bc0..fe9a54f870c2f58054d243e841179068756171e7 100644
--- a/src/methods/adaboost/adaboost_test.pl
+++ b/src/methods/adaboost/adaboost_test.pl
@@ -11,308 +11,309 @@
 
 
 reset_Model_No_Train(Learner) :-
-        initModelNoTraining(0.0001, Learner).
+        adaboost_initModelNoTraining(0.0001, Learner).
 
 reset_Model_With_Train(Learner) :-
-        initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], 2, Learner, 50, 0.0001).
+        adaboost_initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], 2, Learner, 50, 0.0001).
 
 
 
 %%
-%% TESTING predicate initModelWithTraining/7
+%% TESTING predicate adaboost_initModelWithTraining/7
 %%
-:- begin_tests(initModelWithTraining).
+:- begin_tests(adaboost_initModelWithTraining).
 
 %% Failure Tests
 
 test(initModelWithTraining_WrongInputTypes, fail) :-
-        initModelWithTraining(wrong, 3, [0.2,0.2,0.2,0.2], 2, perceptron, 50, 0.0001).
+        adaboost_initModelWithTraining(wrong, 3, [0.2,0.2,0.2,0.2], 2, perceptron, 50, 0.0001).
 
 test(initModelWithTraining_WrongTol, fail) :-
-        initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], -2, perceptron, 50, 0.0001).
+        adaboost_initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], -2, perceptron, 50, 0.0001).
 
 test(initModelWithTraining_WrongLearner, [error(domain_error('perceptron or decision_stump' ,wrongLearner), _)]) :-
-        initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], 2, wrongLearner, 50, 0.0001).
+        adaboost_initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], 2, wrongLearner, 50, 0.0001).
 
 test(initModelWithTraining_WrongIterations, fail) :-
-        initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], 2, perceptron, -50, 0.0001).
+        adaboost_initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], 2, perceptron, -50, 0.0001).
 
 test(initModelWithTraining_WrongTol, fail) :-
-        initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], 2, perceptron, 50, -10.0).
+        adaboost_initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], 2, perceptron, 50, -10.0).
 
 test(initModelWithTraining_MissmatchingLabels) :-
-        initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0, 1 ,1], 2, perceptron, 50, 0.0001).
+        adaboost_initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0, 1 ,1], 2, perceptron, 50, 0.0001).
 
 
 %% Successful Tests
 
 test(initModelWithTraining_Perceptron) :-
-        initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], 2, perceptron, 50, 0.0001).
+        adaboost_initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], 2, perceptron, 50, 0.0001).
 
 test(initModelWithTraining_DecisionStump) :-
-        initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3], 3, [0, 0, 1], 2, decision_stump, 50, 0.0001).
+        adaboost_initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3], 3, [0, 0, 1], 2, decision_stump, 50, 0.0001).
+
+:- end_tests(adaboost_initModelWithTraining).
 
-:- end_tests(initModelWithTraining).
 
 
 %%
-%% TESTING predicate initModelNoTraining/2
+%% TESTING predicate adaboost_initModelNoTraining/2
 %%
-:- begin_tests(initModelNoTrain).
+:- begin_tests(adaboost_initModelNoTraining).
 
 %% Failure Tests
 
 test(initModelNoTraining_WrongLearner, [error(domain_error('perceptron or decision_stump' ,wrongLearner), _)]) :-
-        initModelNoTraining(0.0001, wrongLearner).
+        adaboost_initModelNoTraining(0.0001, wrongLearner).
 
 test(initModelNoTraining_WrongTol, fail) :-
-        initModelNoTraining(-1.0, perceptron).
+        adaboost_initModelNoTraining(-1.0, perceptron).
 
 %% Successful Tests
 
 test(initModelNoTraining_Perceptron) :-
-        initModelNoTraining(0.001, perceptron).
+        adaboost_initModelNoTraining(0.001, perceptron).
 
 test(initModelNoTraining_DecisionStump) :-
-        initModelNoTraining(0.000014, decision_stump).
+        adaboost_initModelNoTraining(0.000014, decision_stump).
 
-:- end_tests(initModelNoTrain).
+:- end_tests(adaboost_initModelNoTraining).
 
 
 %%
-%% TESTING predicate classify/8
+%% TESTING predicate adaboost_classify/5
 %%
-:- begin_tests(classify).
+:- begin_tests(adaboost_classify).
 
 %% Failure Tests
 test(classify_on_untrained_model, [error(_,system_error('The model is not trained!'))]) :-
         reset_Model_No_Train(perceptron),
-        classify([3, 2, 0, 5, 1, 4, 0, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, _, _, _).
+        adaboost_classify([3, 2, 0, 5, 1, 4, 0, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, _, _, _).
 
 test(classify_with_bad_data_input_perceptron, [error(_,system_error('The given data matrix has incorrect dimensions compared to the training data!'))]) :-
         reset_Model_With_Train(perceptron),
-        classify([3, 2, 0, 5, 1, 4, 0, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 2, _, _, _).
+        adaboost_classify([3, 2, 0, 5, 1, 4, 0, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 2, _, _, _).
 
 %% should cause an exeption but doesnt TODO:
 test(classify_with_bad_data_input_decision_stump) :-
         reset_Model_With_Train(decision_stump),
-        classify([3, 2, 0, 5, 1, 4, 0, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 5, _, _, _).
+        adaboost_classify([3, 2, 0, 5, 1, 4, 0, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 5, _, _, _).
         
 
 %% Successful Tests
 test(classify_perceptron) :-
         reset_Model_No_Train(perceptron),
         take_rows_from_iris_CSV(10, Records),
-        train(Records, 4, [0,1,0,1,1,0,1,1,1,0], 2, perceptron, 50, 0.0001, _),
-        classify([3, 2, 0, 5, 1, 4, 0, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4,
+        adaboost_train(Records, 4, [0,1,0,1,1,0,1,1,1,0], 2, perceptron, 50, 0.0001, _),
+        adaboost_classify([3, 2, 0, 5, 1, 4, 0, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4,
                  [1.0,1.0,1.0,1.0,0.0],
                  [[0.0,1.0],[0.0,1.0],[0.0,1.0],[0.0,1.0],[1.0,0.0]], 2).
 
 test(classify_decision_stump) :-
         reset_Model_No_Train(decision_stump),
         take_rows_from_iris_CSV(10, Records),
-        train(Records, 4, [0,1,0,1,1,0,1,1,1,0], 2, decision_stump, 50, 0.0001, _),
-        classify([3, 2, 0, 5, 1, 4, 0, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4,
+        adaboost_train(Records, 4, [0,1,0,1,1,0,1,1,1,0], 2, decision_stump, 50, 0.0001, _),
+        adaboost_classify([3, 2, 0, 5, 1, 4, 0, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4,
                  [1.0,1.0,1.0,1.0,1.0],
                  [[0.0,1.0],[0.0,1.0],[0.0,1.0],[0.0,1.0],[0.0,1.0]], 2).
 
-:- end_tests(classify).
+:- end_tests(adaboost_classify).
 
 
 %%
-%% TESTING predicate numClasses/1
+%% TESTING predicate adaboost_numClasses/1
 %%
-:- begin_tests(numClasses).
+:- begin_tests(adaboost_numClasses).
 
 test(numClasses_Perceptron_NoTrain, [true(Amount =:= 0)]) :-
         reset_Model_No_Train(perceptron),
-        numClasses(Amount).
+        adaboost_numClasses(Amount).
 
 test(numClasses_Decision_Stump_NoTrain, [true(Amount =:= 0)]) :-
         reset_Model_No_Train(decision_stump),
-        numClasses(Amount).
+        adaboost_numClasses(Amount).
 
 test(numClasses_Perceptron_WithTrain, [true(Amount =:= 2)]) :-
         reset_Model_With_Train(perceptron),
-        numClasses(Amount).
+        adaboost_numClasses(Amount).
 
 test(numClasses_Decision_Stump_WithTrain, [true(Amount =:= 2)]) :-
         reset_Model_With_Train(decision_stump),
-        numClasses(Amount).
+        adaboost_numClasses(Amount).
 
 test(numClasses_Custom_NumClasses, [true(Amount =:= 3)]) :-
-        initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], 3, perceptron, 50, 0.0001),
-        numClasses(Amount).
+        adaboost_initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], 3, perceptron, 50, 0.0001),
+        adaboost_numClasses(Amount).
         
 
 test(numClasses_afterTrain_Perceptron, [true(Amount =:= 2)]) :-
         reset_Model_No_Train(perceptron),
         take_rows_from_iris_CSV(10, Records),
-        train(Records, 4, [0,1,0,1,1,0,1,1,1,0], 2, perceptron, 50, 0.0001, _),
-        numClasses(Amount).
+        adaboost_train(Records, 4, [0,1,0,1,1,0,1,1,1,0], 2, perceptron, 50, 0.0001, _),
+        adaboost_numClasses(Amount).
         
-:- end_tests(numClasses).
+:- end_tests(adaboost_numClasses).
 
 
 %%
-%% TESTING predicate getTolerance/1
+%% TESTING predicate adaboost_getTolerance/1
 %%
-:- begin_tests(getTolerance).
+:- begin_tests(adaboost_getTolerance).
 
 test(getTolerance_Perceptron_NoTrain, [true(Amount =:= 0.0001)]) :-
         reset_Model_No_Train(perceptron),
-        getTolerance(Amount).
+        adaboost_getTolerance(Amount).
 
 test(getTolerance_Decision_Stump_NoTrain, [true(Amount =:= 0.0001)]) :-
         reset_Model_No_Train(decision_stump),
-        getTolerance(Amount).
+        adaboost_getTolerance(Amount).
 
 test(getTolerance_Perceptron_WithTrain, [true(Amount =:= 0.0001)]) :-
         reset_Model_With_Train(perceptron),
-        getTolerance(Amount).
+        adaboost_getTolerance(Amount).
 
 test(getTolerance_Decision_Stump_WithTrain, [true(Amount =:= 0.0001)]) :-
         reset_Model_With_Train(decision_stump),
-        getTolerance(Amount).
+        adaboost_getTolerance(Amount).
 
 test(getTolerance_Custom_Tolerance, [true(Amount =:= 0.0009)]) :-
-        initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], 3, perceptron, 50, 0.0009),
-        getTolerance(Amount).
+        adaboost_initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], 3, perceptron, 50, 0.0009),
+        adaboost_getTolerance(Amount).
 
 test(getTolerance_afterTrain, [true(Amount =:= 0.0005)]) :-
         reset_Model_No_Train(perceptron),
         take_rows_from_iris_CSV(10, Records),
-        train(Records, 4, [0,1,0,1,1,0,1,1,1,0], 2, perceptron, 50, 0.0005, _),
-        getTolerance(Amount).
+        adaboost_train(Records, 4, [0,1,0,1,1,0,1,1,1,0], 2, perceptron, 50, 0.0005, _),
+        adaboost_getTolerance(Amount).
         
-:- end_tests(getTolerance).
+:- end_tests(adaboost_getTolerance).
 
 
 %%
-%% TESTING predicate modifyTolerance/1
+%% TESTING predicate adaboost_modifyTolerance/1
 %%
-:- begin_tests(modifyTolerance).
+:- begin_tests(adaboost_modifyTolerance).
 
 %% Failure Tests
 test(modifyTolerance_With_Negative_Input, fail) :-
         reset_Model_No_Train(perceptron),
-        modifyTolerance(-0.02).
+        adaboost_modifyTolerance(-0.02).
 
 
 %% Successful Tests
 test(modifyTolerance_Perceptron_NoTrain, [true(Amount =:= 0.02)]) :-
         reset_Model_No_Train(perceptron),
-        modifyTolerance(0.02),
-        getTolerance(Amount).
+        adaboost_modifyTolerance(0.02),
+        adaboost_getTolerance(Amount).
 
 test(modifyTolerance_Decision_Stump_NoTrain, [true(Amount =:= 0.02)]) :-
         reset_Model_No_Train(decision_stump),
-        modifyTolerance(0.02),
-        getTolerance(Amount).
+        adaboost_modifyTolerance(0.02),
+        adaboost_getTolerance(Amount).
 
 test(modifyTolerance_Perceptron_WithTrain, [true(Amount =:= 0.02)]) :-
         reset_Model_With_Train(perceptron),
-        modifyTolerance(0.02),
-        getTolerance(Amount).
+        adaboost_modifyTolerance(0.02),
+        adaboost_getTolerance(Amount).
 
 test(modifyTolerance_Decision_Stump_WithTrain, [true(Amount =:= 0.02)]) :-
         reset_Model_With_Train(decision_stump),
-        modifyTolerance(0.02),
-        getTolerance(Amount).
+        adaboost_modifyTolerance(0.02),
+        adaboost_getTolerance(Amount).
 
 test(modifyTolerance_Custom_Tolerance, [true(Amount =:= 0.02)]) :-
-        initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], 3, perceptron, 50, 0.0001),
-        modifyTolerance(0.02),
-        getTolerance(Amount).
+        adaboost_initModelWithTraining([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0, 0, 1, 0], 3, perceptron, 50, 0.0001),
+        adaboost_modifyTolerance(0.02),
+        adaboost_getTolerance(Amount).
 
 test(modifyTolerance_afterTrain, [true(Amount =:= 0.02)]) :-
         reset_Model_No_Train(perceptron),
         take_rows_from_iris_CSV(10, Records),
-        train(Records, 4, [0,1,0,1,1,0,1,1,1,0], 2, perceptron, 50, 0.0001, _),
-        modifyTolerance(0.02),
-        getTolerance(Amount).
+        adaboost_train(Records, 4, [0,1,0,1,1,0,1,1,1,0], 2, perceptron, 50, 0.0001, _),
+        adaboost_modifyTolerance(0.02),
+        adaboost_getTolerance(Amount).
         
-:- end_tests(modifyTolerance).
+:- end_tests(adaboost_modifyTolerance).
 
 
 %%
-%% TESTING predicate train/8
+%% TESTING predicate adaboost_train/8
 %%
-:- begin_tests(train).      
+:- begin_tests(adaboost_train).      
 
 %% Failure Tests
 test(train_With_Bad_NumClass_Input, fail) :-
         reset_Model_No_Train(perceptron),
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], -2, perceptron, 50, -0.0001, _).
+        adaboost_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], -2, perceptron, 50, -0.0001, _).
                                             
 test(train_With_Bad_Learner_Input, [error(domain_error('perceptron or decision_stump' ,wrongLearner), _)]) :-
         reset_Model_No_Train(perceptron),
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, wrongLearner, 50, 0.0001, _).
+        adaboost_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, wrongLearner, 50, 0.0001, _).
 
 test(train_With_Bad_Iterations_Input, fail) :-
         reset_Model_No_Train(perceptron),
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, perceptron, -50, 0.0001, _).
+        adaboost_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, perceptron, -50, 0.0001, _).
 
 test(train_With_Bad_Tol_Input, fail) :-
         reset_Model_No_Train(perceptron),
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, perceptron, 50, -0.0001, _).
+        adaboost_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, perceptron, 50, -0.0001, _).
 
 test(train_With_Bad_Labels_Too_Many_Classes, [error(_,system_error('The values of the Label have to start at 0 and be >= 0 and < the given numClass!'))]) :-
         reset_Model_No_Train(perceptron),
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,2], 2, perceptron, 50, 0.0001, _).
+        adaboost_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,2], 2, perceptron, 50, 0.0001, _).
 
 test(train_With_Bad_Labels_Negative_Perceptron, [error(_,system_error('The values of the Label have to start at 0 and be >= 0 and < the given numClass!'))]) :-
         reset_Model_No_Train(perceptron),
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,-1,0,-1], 2, perceptron, 50, 0.0001, _).
+        adaboost_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,-1,0,-1], 2, perceptron, 50, 0.0001, _).
 
 %% should cause an exeption but doesnt TODO:
 test(train_With_Bad_Labels_Negative_Decision_Stump) :-
         reset_Model_No_Train(decision_stump),
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,-1,0,-1], 2, decision_stump, 50, 0.0001, _).
+        adaboost_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,-1,0,-1], 2, decision_stump, 50, 0.0001, _).
 
 %% seems to be allowed
 test(train_With_Too_Many_Labels) :-
         reset_Model_No_Train(perceptron),
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,0,0,1], 2, perceptron, 50, 0.0001, _).
+        adaboost_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,0,0,1], 2, perceptron, 50, 0.0001, _).
 
 test(train_With_Too_Little_Labels, [error(_,system_error('The given Labels Vector is too short!'))]) :-
         reset_Model_No_Train(decision_stump),
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], 2, decision_stump, 50, 0.0001, _).
+        adaboost_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], 2, decision_stump, 50, 0.0001, _).
 
 test(train_With_Negative_RowNumber, fail) :-
         reset_Model_No_Train(decision_stump),
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], -3, [0,0,0,0], 2, decision_stump, 50, 0.0001, _).
+        adaboost_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], -3, [0,0,0,0], 2, decision_stump, 50, 0.0001, _).
         
 
 %% Successful Tests
 
 test(train_With_Direct_Input_Perceptron, [true(Error =:= 1)]) :-
         reset_Model_No_Train(perceptron),
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, perceptron, 50, 0.0001, Error).
+        adaboost_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, perceptron, 50, 0.0001, Error).
 
 test(train_With_Data_From_CSV_Perceptron, [true(Error =:= 0.9797958971132711)]) :-
         reset_Model_No_Train(perceptron),
         take_rows_from_iris_CSV(10, Records),
-        train(Records, 4, [0,1,0,1,1,0,1,1,1,0], 2, perceptron, 50, 0.0001, Error).
+        adaboost_train(Records, 4, [0,1,0,1,1,0,1,1,1,0], 2, perceptron, 50, 0.0001, Error).
 
 test(train_With_Direct_Input_Decision_Stump, [true(Error =:= 1)]) :-
         reset_Model_No_Train(decision_stump),
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, decision_stump, 50, 0.0001, Error).
+        adaboost_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, decision_stump, 50, 0.0001, Error).
 
 test(train_With_Data_From_CSV_Decision_Stump, [true(Error =:= 0.9797958971132711)]) :-
         reset_Model_No_Train(decision_stump),
         take_rows_from_iris_CSV(10, Records),
-        train(Records, 4, [0,1,0,1,1,0,1,1,1,0], 2, decision_stump, 50, 0.0001, Error).
+        adaboost_train(Records, 4, [0,1,0,1,1,0,1,1,1,0], 2, decision_stump, 50, 0.0001, Error).
 
 test(train_After_InitTrain_Perceptron, [true(Error =:= 1)]) :-
         reset_Model_With_Train(perceptron),
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 3, perceptron, 100, 0.01, Error).
+        adaboost_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 3, perceptron, 100, 0.01, Error).
 
 test(train_After_InitTrain_Decision_Stump, [true(Error =:= 1)]) :-
         reset_Model_With_Train(decision_stump),
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 3, decision_stump, 100, 0.01, Error).
+        adaboost_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 3, decision_stump, 100, 0.01, Error).
 
-:- end_tests(train).
+:- end_tests(adaboost_train).
 
 run_adaboost_tests :-
         run_tests.
diff --git a/src/methods/bayesian_linear_regression/bayesian_linear_regression.pl b/src/methods/bayesian_linear_regression/bayesian_linear_regression.pl
index 410a4ddad5616a6e02b66693aaf958516e389149..794e130245d70f69552fc7244ccf8d839b4672d2 100644
--- a/src/methods/bayesian_linear_regression/bayesian_linear_regression.pl
+++ b/src/methods/bayesian_linear_regression/bayesian_linear_regression.pl
@@ -1,15 +1,15 @@
 
-:- module(bayesian_linear_regression, [ initModel/4, 
-                                        alpha/1, 
-                                        beta/1, 
-                                        dataOffset/1, 
-                                        dataScale/1, 
-                                        omega/1, 
-                                        predict/3, 
-                                        predictWithStd/4, 
-                                        rmse/4, 
-                                        train/3, 
-                                        variance/1]).
+:- module(bayesian_linear_regression, [ blr_initModel/4, 
+                                        blr_alpha/1, 
+                                        blr_beta/1, 
+                                        blr_dataOffset/1, 
+                                        blr_dataScale/1, 
+                                        blr_omega/1, 
+                                        blr_predict/3, 
+                                        blr_predictWithStd/4, 
+                                        blr_rmse/4, 
+                                        blr_train/3, 
+                                        blr_variance/1]).
 
 :- load_files(library(str_decl),
                 [when(compile_time), if(changed)]).
@@ -39,7 +39,7 @@
 %%              Initiates the Model so now train/5 can be called.
 %%              Before predict/5 or predictWitStd/7 can be used train/5 has to be called before
 %%
-initModel(CenterData, ScaleData, NIterMax, Tol) :-
+blr_initModel(CenterData, ScaleData, NIterMax, Tol) :-
         NIterMax >= 0,
         Tol > 0,
         initModelI(CenterData, ScaleData, NIterMax, Tol).
@@ -58,7 +58,7 @@ foreign(initModel,  c, initModelI(      +integer,
 %%              Get the precision (or inverse variance) of the gaussian prior.
 %%              train/5 should be called before.
 %%
-alpha(Alpha) :-
+blr_alpha(Alpha) :-
         alphaI(Alpha).
 
 foreign(alpha,  c, alphaI([-float32])).
@@ -73,7 +73,7 @@ foreign(alpha,  c, alphaI([-float32])).
 %%              Get the precision (or inverse variance) beta of the model.
 %%              train/5 should be called before.
 %%
-beta(Beta) :-
+blr_beta(Beta) :-
         betaI(Beta).
 
 foreign(beta,  c, betaI([-float32])).
@@ -87,7 +87,7 @@ foreign(beta,  c, betaI([-float32])).
 %% --Description--
 %%              Get the mean vector computed on the features over the training points.
 %%
-dataOffset(ResponsesList) :-
+blr_dataOffset(ResponsesList) :-
         dataOffsetI(X, Xsize),
         convert_float_array_to_list(X, Xsize, ResponsesList).
 
@@ -102,7 +102,7 @@ foreign(dataOffset, c, dataOffsetI(-pointer(float_array), -integer)).
 %% --Description--
 %%              Get the vector of standard deviations computed on the features over the training points.
 %%
-dataScale(DataOffsetList) :-
+blr_dataScale(DataOffsetList) :-
         dataScaleI(X, Xsize),
         convert_float_array_to_list(X, Xsize, DataOffsetList).
 
@@ -117,7 +117,7 @@ foreign(dataScale, c, dataScaleI(-pointer(float_array), -integer)).
 %% --Description--
 %%              Get the solution vector.
 %%
-omega(OmegaList) :-
+blr_omega(OmegaList) :-
         omegaI(X, Xsize),
         convert_float_array_to_list(X, Xsize, OmegaList).
 
@@ -133,7 +133,7 @@ foreign(omega, c, omegaI(-pointer(float_array), -integer)).
 %% --Description--
 %%              Predict yi for each data point in the given data matrix using the currently-trained Bayesian Ridge model.
 %%
-predict(PointsList, PointsRows, PredictionsList) :-
+blr_predict(PointsList, PointsRows, PredictionsList) :-
         convert_list_to_float_array(PointsList, PointsRows, array(Xsize, Xrows, X)),
         predictI(X, Xsize, Xrows, Y, Ysize),
         convert_float_array_to_list(Y, Ysize, PredictionsList).
@@ -152,7 +152,7 @@ foreign(predict,  c, predictI(  +pointer(float_array), +integer, +integer,
 %% --Description--
 %%              Predict yi and the standard deviation of the predictive posterior distribution for each data point in the given data matrix, using the currently-trained Bayesian Ridge estimator.
 %%
-predictWithStd(PointsList, PointsRows, PredictionsList, STDList) :-
+blr_predictWithStd(PointsList, PointsRows, PredictionsList, STDList) :-
         convert_list_to_float_array(PointsList, PointsRows, array(Xsize, Xrows, X)),
         predictWithStdI(X, Xsize, Xrows, Y, Ysize, Z, Zsize),
         convert_float_array_to_list(Y, Ysize, PredictionsList),
@@ -173,7 +173,7 @@ foreign(predictWithStd,  c, predictWithStdI(    +pointer(float_array), +integer,
 %% --Description--
 %%              Compute the Root Mean Square Error between the predictions returned by the model and the true responses.
 %%
-rmse(DataList, DataRows, ResponsesList, RMSE) :-
+blr_rmse(DataList, DataRows, ResponsesList, RMSE) :-
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrows, X)),
         convert_list_to_float_array(ResponsesList, array(Ysize, Y)),
         rmseI(X, Xsize, Xrows, Y, Ysize, RMSE).
@@ -193,7 +193,7 @@ foreign(rmse,  c, rmseI(+pointer(float_array), +integer, +integer,
 %%              Run BayesianLinearRegression.
 %%              The input matrix (like all mlpack matrices) should be column-major each column is an observation and each row is a dimension.
 %%
-train(DataList, DataRows, ResponsesList) :-
+blr_train(DataList, DataRows, ResponsesList) :-
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrows, X)),
         convert_list_to_float_array(ResponsesList, array(Ysize, Y)),
         trainI(X, Xsize, Xrows, Y, Ysize).
@@ -211,7 +211,7 @@ foreign(train,  c, trainI(      +pointer(float_array), +integer, +integer,
 %%              Get the estimate variance.
 %%              train/5 should be called before.
 %%
-variance(Variance) :-
+blr_variance(Variance) :-
         varianceI(Variance).
 
 foreign(variance,  c, varianceI([-float32])).
diff --git a/src/methods/bayesian_linear_regression/bayesian_linear_regression_test.pl b/src/methods/bayesian_linear_regression/bayesian_linear_regression_test.pl
index 62eb00c67c5a810335cbbb9190b738cf9dab3fa5..6f7fa10437a5e9e3c2306d979ddd563a712701df 100644
--- a/src/methods/bayesian_linear_regression/bayesian_linear_regression_test.pl
+++ b/src/methods/bayesian_linear_regression/bayesian_linear_regression_test.pl
@@ -9,211 +9,211 @@
 
 
 reset_Model :-
-        initModel(1,0,50,0.0001).
+        blr_initModel(1,0,50,0.0001).
 
 
 %%
-%% TESTING predicate initModel/4
+%% TESTING predicate blr_initModel/4
 %%
-:- begin_tests(initModel).      
+:- begin_tests(blr_initModel).      
 
 %% Failure Tests
                                             
 test(bay_lin_reg_InitModel_Negative_NIterMax, fail) :-
-        initModel(0,0,-50,0.0001).
+        blr_initModel(0,0,-50,0.0001).
 
 test(bay_lin_reg_InitModel_Negative_Tolerance, fail) :-
-        initModel(0,0,50,-0.0001).
+        blr_initModel(0,0,50,-0.0001).
         
 
 %% Successful Tests
 
 test(bay_lin_reg_InitModel_Default_Inputs) :-
-        initModel(1,0,50,0.0001).
+        blr_initModel(1,0,50,0.0001).
 
 test(bay_lin_reg_InitModel_Alternative_Inputs) :-
-        initModel(1,1,0,0.0071).
+        blr_initModel(1,1,0,0.0071).
 
-:- end_tests(initModel).
+:- end_tests(blr_initModel).
 
 
 
 %%
-%% TESTING predicate alpha/1
+%% TESTING predicate blr_alpha/1
 %%
-:- begin_tests(alpha).      
+:- begin_tests(blr_alpha).      
 
 %% Failure Tests
 
 test(bay_lin_reg_Alpha_Wrong_Input, fail) :-
         reset_Model,
-        alpha(1).
+        blr_alpha(1).
         
 
 %% Successful Tests
 
 test(bay_lin_reg_Alpha_Std_init, [true(Alpha =:= 0.0)]) :-
         reset_Model,
-        alpha(Alpha).
+        blr_alpha(Alpha).
 
 test(bay_lin_reg_Alpha_After_Train, [true(Alpha =:= 0.12986500952614138)]) :-
         reset_Model,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
-        alpha(Alpha).
+        blr_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
+        blr_alpha(Alpha).
         
-:- end_tests(alpha).
+:- end_tests(blr_alpha).
 
 
 
 %%
-%% TESTING predicate beta/1
+%% TESTING predicate blr_beta/1
 %%
-:- begin_tests(beta).      
+:- begin_tests(blr_beta).      
 
 %% Failure Tests
 
 test(bay_lin_reg_Beta_Wrong_Input, fail) :-
         reset_Model,
-        beta(1).
+        blr_beta(1).
         
 
 %% Successful Tests
 
 test(bay_lin_reg_Beta_Std_init, [true(Beta =:= 0.0)]) :-
         reset_Model,
-        beta(Beta).
+        blr_beta(Beta).
 
 test(bay_lin_reg_Beta_After_Train, [true(Beta =:= 2.317989668988762E+31)]) :-
         reset_Model,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
-        beta(Beta).
+        blr_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
+        blr_beta(Beta).
         
-:- end_tests(beta).
+:- end_tests(blr_beta).
 
 
 
 %%
-%% TESTING predicate dataOffset/1
+%% TESTING predicate blr_dataOffset/1
 %%
-:- begin_tests(dataOffset).      
+:- begin_tests(blr_dataOffset).      
 
 %% Failure Tests
                                             
 test(bay_lin_reg_DataOffset_Before_Train, [error(_,system_error('The Model is not Trained!'))]) :-
         reset_Model,
-        dataOffset(_).
+        blr_dataOffset(_).
         
 
 %% Successful Tests
 
 test(bay_lin_reg_DataOffset_DirektInput) :-
         reset_Model,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
-        dataOffset(ResponsesOffsetList),
+        blr_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
+        blr_dataOffset(ResponsesOffsetList),
         print('\nResponsesOffset: '),
         print(ResponsesOffsetList).
 
 test(bay_lin_reg_DataOffset_CSV_Input) :-
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        train(Data, 4, [0,1,0,1,1,0,1,1,1,0]),
-        dataOffset(ResponsesOffsetList),
+        blr_train(Data, 4, [0,1,0,1,1,0,1,1,1,0]),
+        blr_dataOffset(ResponsesOffsetList),
         print('\nResponsesOffset: '),
         print(ResponsesOffsetList).
         
-:- end_tests(dataOffset).
+:- end_tests(blr_dataOffset).
 
 
 
 %%
-%% TESTING predicate dataScale/1
+%% TESTING predicate blr_dataScale/1
 %%
-:- begin_tests(dataScale).      
+:- begin_tests(blr_dataScale).      
 
 %% Failure Tests
                                             
 test(bay_lin_reg_DataScale_Before_Train, [error(_,system_error('The Model is not Trained!'))]) :-
         reset_Model,
-        dataScale(_).
+        blr_dataScale(_).
         
 
 %% Successful Tests
 
 test(bay_lin_reg_DataScale_DirektInput) :-
         reset_Model,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
-        dataScale(DataOffsetList),
+        blr_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
+        blr_dataScale(DataOffsetList),
         print('\nDataOffset: '),
         print(DataOffsetList).
 
 test(bay_lin_reg_DataScale_CSV_Input) :-
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        train(Data, 4, [0,1,0,1,1,0,1,1,1,0]),
-        dataScale(DataOffsetList),
+        blr_train(Data, 4, [0,1,0,1,1,0,1,1,1,0]),
+        blr_dataScale(DataOffsetList),
         print('\nDataOffset: '),
         print(DataOffsetList).
         
-:- end_tests(dataScale).
+:- end_tests(blr_dataScale).
 
 
 
 %%
-%% TESTING predicate omega/1
+%% TESTING predicate blr_omega/1
 %%
-:- begin_tests(omega).      
+:- begin_tests(blr_omega).      
 
 %% Failure Tests
                                             
 test(bay_lin_reg_Omega_Before_Train, [error(_,system_error('The Model is not Trained!'))]) :-
         reset_Model,
-        omega(_).
+        blr_omega(_).
         
 
 %% Successful Tests
 
 test(bay_lin_reg_Omega_DirektInput) :-
         reset_Model,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
-        omega(OmegaList),
+        blr_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
+        blr_omega(OmegaList),
         print('\nOmega: '),
         print(OmegaList).
 
 test(bay_lin_reg_Omega_CSV_Input) :-
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        train(Data, 4, [0,1,0,1,1,0,1,1,1,0]),
-        omega(OmegaList),
+        blr_train(Data, 4, [0,1,0,1,1,0,1,1,1,0]),
+        blr_omega(OmegaList),
         print('\nOmega: '),
         print(OmegaList).
         
-:- end_tests(omega).
+:- end_tests(blr_omega).
 
 
 
 %%
-%% TESTING predicate predict/3
+%% TESTING predicate blr_predict/3
 %%
-:- begin_tests(predict).      
+:- begin_tests(blr_predict).      
 
 %% Failure Tests
                                             
 test(bay_lin_reg_Predict_Before_Train, [error(_,system_error('The Model is not Trained!'))]) :-
         reset_Model,
-        predict([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, _).
+        blr_predict([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, _).
 
 test(bay_lin_reg_Predict_Different_Dims_Than_Trained, [error(_,system_error('each_col(): incompatible size; expected 4x1, got 3x1'))]) :-
         reset_Model,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
-        predict([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, _).
+        blr_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
+        blr_predict([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, _).
         
 
 %% Successful Tests
 
 test(bay_lin_reg_Predict_Direct_Input) :-
         reset_Model,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
-        predict([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, PredictionsList),
+        blr_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
+        blr_predict([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, PredictionsList),
         print('\nPredictions: '),
         print(PredictionsList).
 
@@ -221,38 +221,38 @@ test(bay_lin_reg_Predict_CSV_Input) :-
         reset_Model,
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        train(Data, 4, [0,1,0,1,1,0,1,1,1,0]),
-        predict([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, PredictionsList),
+        blr_train(Data, 4, [0,1,0,1,1,0,1,1,1,0]),
+        blr_predict([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, PredictionsList),
         print('\nPredictions: '),
         print(PredictionsList).
         
-:- end_tests(predict).
+:- end_tests(blr_predict).
 
 
 
 %%
-%% TESTING predicate predictWithStd/3
+%% TESTING predicate blr_predictWithStd/4
 %%
-:- begin_tests(predictWithStd).      
+:- begin_tests(blr_predictWithStd).      
 
 %% Failure Tests
                                             
 test(bay_lin_reg_PredictWithStd_Before_Train, [error(_,system_error('The Model is not Trained!'))]) :-
         reset_Model,
-        predictWithStd([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, _, _).
+        blr_predictWithStd([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, _, _).
 
 test(bay_lin_reg_PredictWithStd_Different_Dims_Than_Trained, [error(_,system_error('each_col(): incompatible size; expected 4x1, got 3x1'))]) :-
         reset_Model,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
-        predictWithStd([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, _, _).
+        blr_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
+        blr_predictWithStd([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, _, _).
         
 
 %% Successful Tests
 
 test(bay_lin_reg_PredictWithStd_Direct_Input) :-
         reset_Model,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
-        predictWithStd([3, 2, 0, 5, 1, 4, 0, 0, 4, 3, 3, 5, 0, 5, 5], 3, PredictionsList, STDList),
+        blr_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
+        blr_predictWithStd([3, 2, 0, 5, 1, 4, 0, 0, 4, 3, 3, 5, 0, 5, 5], 3, PredictionsList, STDList),
         print('\nPredictions: '),
         print(PredictionsList),
         print('\nSTD: '),
@@ -262,63 +262,63 @@ test(bay_lin_reg_PredictWithStd_CSV_Input) :-
         reset_Model,
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        train(Data, 4, [0,1,0,1,1,0,1,1,1,0]),
-        predictWithStd([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, PredictionsList, STDList),
+        blr_train(Data, 4, [0,1,0,1,1,0,1,1,1,0]),
+        blr_predictWithStd([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, PredictionsList, STDList),
         print('\nPredictions: '),
         print(PredictionsList),
         print('\nSTD: '),
         print(STDList).
         
-:- end_tests(predictWithStd).
+:- end_tests(blr_predictWithStd).
 
 
 
 %%
-%% TESTING predicate rmse/4
+%% TESTING predicate blr_rmse/4
 %%
-:- begin_tests(rmse).      
+:- begin_tests(blr_rmse).      
 
 %% Failure Tests
                                             
 test(bay_lin_reg_RMSE_Before_Train, [error(_,system_error('The Model is not Trained!'))]) :-
         reset_Model,
-        rmse([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], _).
+        blr_rmse([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], _).
 
 test(bay_lin_reg_RMSE_Too_Small_Label_Dims, [error(_,system_error('subtraction: incompatible matrix dimensions: 1x2 and 1x4'))]) :-
         reset_Model,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
-        rmse([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], _).
+        blr_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
+        blr_rmse([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], _).
 
 test(bay_lin_reg_RMSE_Too_Large_Label_Dims, [error(_,system_error('subtraction: incompatible matrix dimensions: 1x6 and 1x4'))]) :-
         reset_Model,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
-        rmse([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,1,0,0,0], _).
+        blr_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
+        blr_rmse([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,1,0,0,0], _).
 
 %% doesnt cause an exception
 test(bay_lin_reg_RMSE_Wrong_Label_Value) :-
         reset_Model,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
-        rmse([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,5,0,-1], _).
+        blr_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
+        blr_rmse([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,5,0,-1], _).
         
 test(bay_lin_reg_RMSE_Wrong_Data_Dims, [error(_,system_error('each_col(): incompatible size; expected 4x1, got 3x1'))]) :-
         reset_Model,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
-        rmse([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, [0,1,0,1], _).
+        blr_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
+        blr_rmse([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, [0,1,0,1], _).
 
 test(bay_lin_reg_RMSE_Wrong_Amount_Off_DataPoints, [error(_,system_error('subtraction: incompatible matrix dimensions: 1x10 and 1x5'))]) :-
         reset_Model,
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        train(Data, 4, [0,1,0,1,1,0,1,1,1,0]),
-        rmse([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, [0,1,0,1,1,0,1,1,1,0], _).
+        blr_train(Data, 4, [0,1,0,1,1,0,1,1,1,0]),
+        blr_rmse([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, [0,1,0,1,1,0,1,1,1,0], _).
 
 
 %% Successful Tests
 
 test(bay_lin_reg_RMSE_Direct_Input) :-
         reset_Model,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
-        rmse([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], RMSE),
+        blr_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
+        blr_rmse([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], RMSE),
         print('\nRMSE: '),
         print(RMSE).
 
@@ -326,71 +326,71 @@ test(bay_lin_reg_RMSE_CSV_Input) :-
         reset_Model,
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,5, Data),
-        train(Data, 4, [0,1,0,1,1]),
-        rmse([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, [0,1,0,1,1], RMSE),
+        blr_train(Data, 4, [0,1,0,1,1]),
+        blr_rmse([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, [0,1,0,1,1], RMSE),
         print('\nRMSE: '),
         print(RMSE).
         
-:- end_tests(rmse).
+:- end_tests(blr_rmse).
 
 
 
 %%
-%% TESTING predicate train/3
+%% TESTING predicate blr_train/3
 %%
-:- begin_tests(train).      
+:- begin_tests(blr_train).      
 
 %% Failure Tests
 
 test(bay_lin_reg_Train_Too_Small_Label_Dims, [error(_,system_error('Target dim doesnt fit to the Data dim'))]) :-
         reset_Model,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1]).
+        blr_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1]).
 
 test(bay_lin_reg_Train_Too_Large_Label_Dims, [error(_,system_error('Target dim doesnt fit to the Data dim'))]) :-
         reset_Model,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,1,0,0,0]).
+        blr_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,1,0,0,0]).
 
 %% doesnt cause an Exception
 test(bay_lin_reg_Train_Wrong_Label_Value) :-
         reset_Model,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,5,0,-1]).
+        blr_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,5,0,-1]).
 
 %% Successful Tests
 
 test(bay_lin_reg_Train_Direct_Input) :-
         reset_Model,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]).
+        blr_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]).
 
 test(bay_lin_reg_Train_CSV_Input) :-
         reset_Model,
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        train(Data, 4, [0,1,0,1,1,0,1,1,1,0]).
+        blr_train(Data, 4, [0,1,0,1,1,0,1,1,1,0]).
         
-:- end_tests(train).
+:- end_tests(blr_train).
 
 
 
 %%
-%% TESTING predicate variance/1
+%% TESTING predicate blr_variance/1
 %%
-:- begin_tests(variance).      
+:- begin_tests(blr_variance).      
 
 %% Failure Tests
 
 test(bay_lin_reg_Variance_Before_Train, [error(_,system_error('The Model is not Trained!'))]) :-
         reset_Model,
-        variance(_).
+        blr_variance(_).
         
 
 %% Successful Tests
 
 test(bay_lin_reg_Variance_After_Train, [true(Variance =:= 4.3140830754274083E-32)]) :-
         reset_Model,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
-        variance(Variance).
+        blr_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1]),
+        blr_variance(Variance).
         
-:- end_tests(variance).
+:- end_tests(blr_variance).
 
 
 run_bayesian_linear_regression_tests :-
diff --git a/src/methods/decision_tree/decision_tree.pl b/src/methods/decision_tree/decision_tree.pl
index 3bcb22aa147c1f522106d660632130b41a693dbe..16229538656dbab3222b90db952eb16deef6a8df 100644
--- a/src/methods/decision_tree/decision_tree.pl
+++ b/src/methods/decision_tree/decision_tree.pl
@@ -1,8 +1,8 @@
 
-:- module(decision_tree, [      initModel/7,
-                                classifyPoint/3,
-                                classifyMatrix/5,
-                                train/8]).
+:- module(decision_tree, [      decision_tree_initModel/7,
+                                decision_tree_classifyPoint/3,
+                                decision_tree_classifyMatrix/5,
+                                decision_tree_train/8]).
 
 %% requirements of library(struct)
 :- load_files(library(str_decl),
@@ -34,7 +34,7 @@
 %%              Construct the decision tree on the given data and labels, assuming that the data is all of the numeric type.
 %%              Setting minimumLeafSize and minimumGainSplit too small may cause the tree to overfit, but setting them too large may cause it to underfit.
 %%
-initModel(DataList, DataRows, LabelsList, NumClasses, MinimumLeafSize, MinimumGainSplit, MaximumDepth) :-
+decision_tree_initModel(DataList, DataRows, LabelsList, NumClasses, MinimumLeafSize, MinimumGainSplit, MaximumDepth) :-
         NumClasses >= 0,
         MinimumLeafSize > 0,
         MinimumGainSplit > 0.0,
@@ -59,7 +59,7 @@ foreign(initModel, c, initModelI(       +pointer(float_array), +integer, +intege
 %% --Description--
 %%              Classify the given point and also return estimates of the probability for each class in the given vector.
 %%
-classifyPoint(DataList, Prediction, AssignList) :-
+decision_tree_classifyPoint(DataList, Prediction, AssignList) :-
         convert_list_to_float_array(DataList, array(Xsize, X)),
         classifyPointI(X, Xsize, Prediction, Y, Ysize),
         convert_float_array_to_list(Y, Ysize, AssignList).
@@ -79,7 +79,7 @@ foreign(classifyPoint, c, classifyPointI(+pointer(float_array), +integer,
 %% --Description--
 %%              Classify the given points and also return estimates of the probabilities for each class in the given matrix.
 %%
-classifyMatrix(DataList, DataRows, PredictionList, ProbsList, ZCols) :-
+decision_tree_classifyMatrix(DataList, DataRows, PredictionList, ProbsList, ZCols) :-
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrows, X)),
         classifyMatrixI(X, Xsize, Xrows, Y, Ysize, Z, ZCols, ZRows),
         convert_float_array_to_list(Y, Ysize, PredictionList),
@@ -105,7 +105,7 @@ foreign(classifyMatrix, c, classifyMatrixI(      +pointer(float_array), +integer
 %%              Train the decision tree on the given data, assuming that all dimensions are numeric.
 %%              This will overwrite the given model. Setting minimumLeafSize and minimumGainSplit too small may cause the tree to overfit, but setting them too large may cause it to underfit.
 %%
-train(DataList, DataRows, LabelsList, NumClasses, MinimumLeafSize, MinimumGainSplit, MaximumDepth, Entropy) :-
+decision_tree_train(DataList, DataRows, LabelsList, NumClasses, MinimumLeafSize, MinimumGainSplit, MaximumDepth, Entropy) :-
         NumClasses >= 0,
         MinimumLeafSize > 0,
         MinimumGainSplit > 0.0,
diff --git a/src/methods/decision_tree/decision_tree_test.pl b/src/methods/decision_tree/decision_tree_test.pl
index 712d1c19d78d94df50850c1d890d9f477017fd69..c5ed2ac305808a67b434a58f156f234210933655 100644
--- a/src/methods/decision_tree/decision_tree_test.pl
+++ b/src/methods/decision_tree/decision_tree_test.pl
@@ -7,59 +7,59 @@
 :- use_module('../../helper_files/helper.pl').
 
 reset_Model_With_Train :-
-        initModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,0], 2, 10, 0.5, 0).
+        decision_tree_initModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,0], 2, 10, 0.5, 0).
 
 %%
-%% TESTING predicate initModel/7
+%% TESTING predicate decision_tree_initModel/7
 %%
-:- begin_tests(initModel).      
+:- begin_tests(decision_tree_initModel).      
 
 %% Failure Tests
                                             
 test(decision_tree_Negative_NumClass, fail) :-
-        initModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], -1, 1, 0.5, 0).
+        decision_tree_initModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], -1, 1, 0.5, 0).
 
 test(decision_tree_Negative_LeafSize, fail) :-
-        initModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 1, -1, 0.5, 0).
+        decision_tree_initModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 1, -1, 0.5, 0).
 
 test(decision_tree_Negative_GainSplit, fail) :-
-        initModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 1, 1, -0.5, 0).
+        decision_tree_initModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 1, 1, -0.5, 0).
 
 test(decision_tree_Too_High_GainSplit, fail) :-
-        initModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 1, 1, 1.5, 0).
+        decision_tree_initModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 1, 1, 1.5, 0).
 
 test(decision_tree_Negative_MaxDepth, fail) :-
-        initModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 1, 1, 0.5, -1).
+        decision_tree_initModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 1, 1, 0.5, -1).
 
 test(decision_tree_Init_With_Wrong_Label_Dims1, [error(_,system_error('Labels Vector is too short or its values are incorrect: should fit into [0,numClasses)!'))]) :-
-        initModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0], 1, 1, 0.5, 1).
+        decision_tree_initModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0], 1, 1, 0.5, 1).
 
 %% If the label vector is to long it seems to cause no problems
 test(decision_tree_Init_With_Wrong_Label_Dims2) :-
-        initModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0,0,0], 1, 1, 0.5, 1).
+        decision_tree_initModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0,0,0], 1, 1, 0.5, 1).
 
 %% The same when the label values are out of range
 test(decision_tree_Init_With_Wrong_Label_Value) :-
-        initModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,5,0,-1], 1, 1, 0.5, 1).
+        decision_tree_initModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,5,0,-1], 1, 1, 0.5, 1).
         
 
 %% Successful Tests
 
 test(initModel_Direkt_Input_Use) :-
-        initModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, 10, 0.5, 0).
+        decision_tree_initModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, 10, 0.5, 0).
 
 test(initModel_Direkt_CSV_Use) :-
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        initModel(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 2, 0.7, 3).
+        decision_tree_initModel(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 2, 0.7, 3).
 
-:- end_tests(initModel).
+:- end_tests(decision_tree_initModel).
 
 
 %%
-%% TESTING predicate classifyPoint/3
+%% TESTING predicate decision_tree_classifyPoint/3
 %%
-:- begin_tests(classifyPoint).      
+:- begin_tests(decision_tree_classifyPoint).      
 
 %% Failure Tests
 
@@ -67,7 +67,7 @@ test(initModel_Direkt_CSV_Use) :-
 %% so im not certain if this should be forced to fail
 test(classify_Point_With_Wrong_Dims) :-
         reset_Model_With_Train,
-        classifyPoint([5.1,3.5,1.4,1.2,3.3], Prediction, AssignList),
+        decision_tree_classifyPoint([5.1,3.5,1.4,1.2,3.3], Prediction, AssignList),
         print(Prediction),
         print('\n'),
         print(AssignList).
@@ -77,25 +77,25 @@ test(classify_Point_With_Wrong_Dims) :-
 
 test(classify_Point1) :-
         reset_Model_With_Train,
-        classifyPoint([5.1,3.5,1.4], Prediction, AssignList),
+        decision_tree_classifyPoint([5.1,3.5,1.4], Prediction, AssignList),
         print(Prediction),
         print('\n'),
         print(AssignList).
 
 test(classify_Point2) :-
         reset_Model_With_Train,
-        classifyPoint([6.2,1.9,2.3], Prediction, AssignList),
+        decision_tree_classifyPoint([6.2,1.9,2.3], Prediction, AssignList),
         print(Prediction),
         print('\n'),
         print(AssignList).
 
-:- end_tests(classifyPoint).
+:- end_tests(decision_tree_classifyPoint).
 
 
 %%
-%% TESTING predicate classifyMatrix/4
+%% TESTING predicate decision_tree_classifyMatrix/5
 %%
-:- begin_tests(classifyMatrix).      
+:- begin_tests(decision_tree_classifyMatrix).      
 
 %% Failure Tests
 
@@ -103,7 +103,7 @@ test(classify_Point2) :-
 %% so im not certain if this should be forced to fail
 test(classify_Matrix_With_Wrong_Dims1) :-
         reset_Model_With_Train,
-        classifyMatrix([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 5, PredictionList, ProbsList, _),
+        decision_tree_classifyMatrix([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 5, PredictionList, ProbsList, _),
         print(PredictionList),
         print('\n'),
         print(ProbsList).
@@ -112,7 +112,7 @@ test(classify_Matrix_With_Wrong_Dims1) :-
 %% so im not certain if this should be forced to fail
 test(classify_Matrix_With_Wrong_Dims2) :-
         reset_Model_With_Train,
-        classifyMatrix([3, 2, 0, 5, 1, 4, 0, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 2, PredictionList, ProbsList, _),
+        decision_tree_classifyMatrix([3, 2, 0, 5, 1, 4, 0, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 2, PredictionList, ProbsList, _),
         print(PredictionList),
         print('\n'),
         print(ProbsList).
@@ -121,22 +121,22 @@ test(classify_Matrix_With_Wrong_Dims2) :-
 %% Successful Tests
 
 test(classify_Matrix_Wierd_Trained_Labels) :-
-        initModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,5,0,-1], 1, 1, 0.5, 1),
-        classifyMatrix([5.1,3.5,1.4, 5.1,3.5,1.4, 5.1,3.5,1.4, 5.1,3.5,1.4, 5.1,3.5,1.4], 3, PredictionList, ProbsList, _),
+        decision_tree_initModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,5,0,-1], 1, 1, 0.5, 1),
+        decision_tree_classifyMatrix([5.1,3.5,1.4, 5.1,3.5,1.4, 5.1,3.5,1.4, 5.1,3.5,1.4, 5.1,3.5,1.4], 3, PredictionList, ProbsList, _),
         print(PredictionList),
         print('\n'),
         print(ProbsList).
 
 test(classify_Matrix_Direkt_Input1) :-
         reset_Model_With_Train,
-        classifyMatrix([5.1,3.5,1.4, 5.1,3.5,1.4, 5.1,3.5,1.4, 5.1,3.5,1.4, 5.1,3.5,1.4], 3, PredictionList, ProbsList, _),
+        decision_tree_classifyMatrix([5.1,3.5,1.4, 5.1,3.5,1.4, 5.1,3.5,1.4, 5.1,3.5,1.4, 5.1,3.5,1.4], 3, PredictionList, ProbsList, _),
         print(PredictionList),
         print('\n'),
         print(ProbsList).
 
 test(classify_Matrix_Direkt_Input2) :-
         reset_Model_With_Train,
-        classifyMatrix([2, 2, 3, 5, 1, 4, 1, 1, 4, 0, 3, 5, 0, 5, 5], 3, PredictionList, ProbsList, _),
+        decision_tree_classifyMatrix([2, 2, 3, 5, 1, 4, 1, 1, 4, 0, 3, 5, 0, 5, 5], 3, PredictionList, ProbsList, _),
         print(PredictionList),
         print('\n'),
         print(ProbsList).
@@ -144,69 +144,69 @@ test(classify_Matrix_Direkt_Input2) :-
 test(classify_Matrix_CSV_Trained) :-
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,30, Data),
-        train(Data, 4, [0,1,0,1,1,0,1,1,1,0, 0,1,0,1,1,0,1,1,1,0, 0,1,0,1,1,0,1,1,1,0], 2, 5, 0.0007, 0, _),
-        classifyMatrix([2, 2, 3, 5, 1, 4, 1, 1, 4, 0, 3, 5, 0, 5, 5, 2, 2, 6, 0, 1], 4, PredictionList, ProbsList, _),
+        decision_tree_train(Data, 4, [0,1,0,1,1,0,1,1,1,0, 0,1,0,1,1,0,1,1,1,0, 0,1,0,1,1,0,1,1,1,0], 2, 5, 0.0007, 0, _),
+        decision_tree_classifyMatrix([2, 2, 3, 5, 1, 4, 1, 1, 4, 0, 3, 5, 0, 5, 5, 2, 2, 6, 0, 1], 4, PredictionList, ProbsList, _),
         print(PredictionList),
         print('\n'),
         print(ProbsList).
 
-:- end_tests(classifyMatrix).
+:- end_tests(decision_tree_classifyMatrix).
 
 
 %%
-%% TESTING predicate train/8
+%% TESTING predicate decision_tree_train/8
 %%
-:- begin_tests(train).      
+:- begin_tests(decision_tree_train).      
 
 %% Failure Tests
                                             
 test(decision_tree_Train_Negative_NumClass, fail) :-
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], -1, 1, 0.5, 0, _).
+        decision_tree_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], -1, 1, 0.5, 0, _).
 
 test(decision_tree_Train_Negative_LeafSize, fail) :-
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 1, -1, 0.5, 0, _).
+        decision_tree_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 1, -1, 0.5, 0, _).
 
 test(decision_tree_Train_Negative_GainSplit, fail) :-
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 1, 1, -0.5, 0, _).
+        decision_tree_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 1, 1, -0.5, 0, _).
 
 test(decision_tree_Train_Too_High_GainSplit, fail) :-
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 1, 1, 1.5, 0, _).
+        decision_tree_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 1, 1, 1.5, 0, _).
 
 test(decision_tree_Train_Negative_MaxDepth, fail) :-
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 1, 1, 0.5, -1, _).
+        decision_tree_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 1, 1, 0.5, -1, _).
 
 test(decision_tree_Train_Wrong_Label_Dims1, [error(_,system_error('DecisionTree::Train(): number of points (4) does not match number of labels (2)!\n'))]) :-
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0], 1, 1, 0.5, 1, _).
+        decision_tree_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0], 1, 1, 0.5, 1, _).
 
 test(decision_tree_Train_Wrong_Label_Dims2, [error(_,system_error('DecisionTree::Train(): number of points (4) does not match number of labels (6)!\n'))]) :-
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0,0,0], 1, 1, 0.5, 1, _).
+        decision_tree_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0,0,0], 1, 1, 0.5, 1, _).
 
 %% there seems to be no check for the label values
 test(decision_tree_Train_Wrong_Labels) :-
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [-1,0,0,5], 1, 1, 0.5, 1, _).
+        decision_tree_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [-1,0,0,5], 1, 1, 0.5, 1, _).
         
 
 %% Successful Tests
 
 test(initModel_Direkt_Input_Use, [true(Entropy =:= 0.0)]) :-
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, 10, 0.5, 0, Entropy).
+        decision_tree_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, 10, 0.5, 0, Entropy).
 
 test(initModel_Direkt_CSV_Use, [true(Entropy =:= 0.48)]) :-
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        train(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 2, 0.7, 3, Entropy).
+        decision_tree_train(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 2, 0.7, 3, Entropy).
 
 test(initModel_Direkt_Input_Use, [true(Entropy =:= 0.0)]) :-
         reset_Model_With_Train,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 3, 10, 0.7, 0, Entropy).
+        decision_tree_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 3, 10, 0.7, 0, Entropy).
 
 test(initModel_Direkt_CSV_Use, [true(Entropy =:= 0.3767195767195767)]) :-
         reset_Model_With_Train,
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,30, Data),
-        train(Data, 4, [0,1,0,1,1,0,1,1,1,0, 0,1,0,1,1,0,1,1,1,0, 0,1,0,1,1,0,1,1,1,0], 2, 5, 0.0005, 0, Entropy).
+        decision_tree_train(Data, 4, [0,1,0,1,1,0,1,1,1,0, 0,1,0,1,1,0,1,1,1,0, 0,1,0,1,1,0,1,1,1,0], 2, 5, 0.0005, 0, Entropy).
 
-:- end_tests(train).
+:- end_tests(decision_tree_train).
 
 run_decision_tree_tests :-
         run_tests.
diff --git a/src/methods/fastmks/fastmks.pl b/src/methods/fastmks/fastmks.pl
index 09235703c243db64eb1d11dbfff8497d28fb43a5..65297349c09d3277fd4d0d64f2d2bccc9ac3d935 100644
--- a/src/methods/fastmks/fastmks.pl
+++ b/src/methods/fastmks/fastmks.pl
@@ -1,7 +1,7 @@
 
-:- module(fastmks, [    initModel/10,
-                        searchWithQuery/8,
-                        searchNoQuery/5]).
+:- module(fastmks, [    fastmks_initModel/10,
+                        fastmks_searchWithQuery/8,
+                        fastmks_searchNoQuery/5]).
 
 %% requirements of library(struct)
 :- load_files(library(str_decl),
@@ -35,7 +35,7 @@
 %% --Description--
 %%              Initializes the model on the given reference set.
 %%
-initModel(DataList, DataRows, Kernel, Degree, Offset, Bandwidth, Scale, SingleMode, Naive, Base) :-
+fastmks_initModel(DataList, DataRows, Kernel, Degree, Offset, Bandwidth, Scale, SingleMode, Naive, Base) :-
         Base > 1.0,
         Bandwidth > 0.0,
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrows, X)),
@@ -61,7 +61,7 @@ foreign(initModel, c, initModelI(       +pointer(float_array), +integer, +intege
 %% --Description--
 %%              Search with a different query set.
 %%
-searchWithQuery(DataList, DataRows, K, IndicesList, YCols, KernelsList, ZCols, Base) :-
+fastmks_searchWithQuery(DataList, DataRows, K, IndicesList, YCols, KernelsList, ZCols, Base) :-
         K > 0,
         Base > 1.0,
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrows, X)),
@@ -87,7 +87,7 @@ foreign(searchWithQuery, c, searchWithQueryI(   +pointer(float_array), +integer,
 %% --Description--
 %%              Search with the reference set as the query set.
 %%
-searchNoQuery(K, IndicesList, YCols, KernelsList, ZCols) :-
+fastmks_searchNoQuery(K, IndicesList, YCols, KernelsList, ZCols) :-
         K > 0,
         searchNoQueryI(K, Y, YCols, YRows, Z, ZCols, ZRows),
         convert_float_array_to_2d_list(Y, YCols, YRows, IndicesList),
diff --git a/src/methods/fastmks/fastmks_test.pl b/src/methods/fastmks/fastmks_test.pl
index 1d5aeeb3a429fbb3fc96a9d77b157034ac15a5c8..386121ee2e567526b287a92f1012b9f9186b2faf 100644
--- a/src/methods/fastmks/fastmks_test.pl
+++ b/src/methods/fastmks/fastmks_test.pl
@@ -9,29 +9,29 @@
 reset_Model :-
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        initModel(Data, 4, linear, 0.0, 0.0, 1.0, 0.0, 0, 0, 1.2).
+        fastmks_initModel(Data, 4, linear, 0.0, 0.0, 1.0, 0.0, 0, 0, 1.2).
 
 
 %%
-%% TESTING predicate initModel/10
+%% TESTING predicate fastmks_initModel/10
 %%
-:- begin_tests(initModel).      
+:- begin_tests(fastmks_initModel).      
 
 %% Failure Tests
 
 test(searchWithQuery_Fastmks_Search_Before_Init, [error(_,system_error('The Model hasnt been trained yet!'))]) :-
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,5, Data),
-        searchWithQuery(Data, 4, 2, _, _, _, _, 1.1).
+        fastmks_searchWithQuery(Data, 4, 2, _, _, _, _, 1.1).
 
 test(searchNoQuery_Fastmks_Search_Before_Init, [error(_,system_error('The Model hasnt been trained yet!'))]) :-
-        searchNoQuery(2, _, _, _, _).
+        fastmks_searchNoQuery(2, _, _, _, _).
                                             
 test(initModel_Fatsmks_WrongKernel_Input, [error(domain_error('The given kernel is unkown!' , wrongKernel), _)]) :-
-        initModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, wrongKernel, 0.0, 0.0, 1.0, 0.0, 0, 0, 1.2).
+        fastmks_initModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, wrongKernel, 0.0, 0.0, 1.0, 0.0, 0, 0, 1.2).
 
 test(initModel_Fatsmks_Bad_Base_Input, fail) :-
-        initModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, linear, 0.0, 0.0, 1.0, 0.0, 0, 0, -0.1).
+        fastmks_initModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, linear, 0.0, 0.0, 1.0, 0.0, 0, 0, -0.1).
         
 
 %% Successful Tests
@@ -39,45 +39,45 @@ test(initModel_Fatsmks_Bad_Base_Input, fail) :-
 test(iniModel_Fastmks_Linear) :-
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        initModel(Data, 4, linear, 0.0, 0.0, 1.0, 0.0, 0, 0, 1.5).
+        fastmks_initModel(Data, 4, linear, 0.0, 0.0, 1.0, 0.0, 0, 0, 1.5).
 
 test(iniModel_Fastmks_Polynomial) :-
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        initModel(Data, 4, polynomial, 0.0, 0.0, 1.0, 0.0, 0, 0, 1.5).
+        fastmks_initModel(Data, 4, polynomial, 0.0, 0.0, 1.0, 0.0, 0, 0, 1.5).
 
 test(iniModel_Fastmks_Cosine) :-
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        initModel(Data, 4, cosine, 0.0, 0.0, 1.0, 0.0, 0, 0, 1.5).
+        fastmks_initModel(Data, 4, cosine, 0.0, 0.0, 1.0, 0.0, 0, 0, 1.5).
 
 test(iniModel_Fastmks_Gaussian) :-
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        initModel(Data, 4, gaussian, 0.5, 0.5, 0.5, 0.5, 0, 0, 1.5).
+        fastmks_initModel(Data, 4, gaussian, 0.5, 0.5, 0.5, 0.5, 0, 0, 1.5).
 
 test(iniModel_Fastmks_Epanechnikov) :-
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        initModel(Data, 4, epanechnikov, 0.0, 0.0, 1.0, 0.0, 0, 0, 1.5).
+        fastmks_initModel(Data, 4, epanechnikov, 0.0, 0.0, 1.0, 0.0, 0, 0, 1.5).
 
 test(iniModel_Fastmks_Triangular) :-
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        initModel(Data, 4, triangular, 0.0, 0.0, 1.0, 0.0, 0, 0, 1.5).
+        fastmks_initModel(Data, 4, triangular, 0.0, 0.0, 1.0, 0.0, 0, 0, 1.5).
 
 test(iniModel_Fastmks_Hyptan) :-
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        initModel(Data, 4, hyptan, 0.0, 0.0, 1.0, 1.0, 0, 0, 1.5).
+        fastmks_initModel(Data, 4, hyptan, 0.0, 0.0, 1.0, 1.0, 0, 0, 1.5).
 
-:- end_tests(initModel).
+:- end_tests(fastmks_initModel).
 
 
 %%
-%% TESTING predicate searchWithQuery/8
+%% TESTING predicate fastmks_searchWithQuery/8
 %%
-:- begin_tests(searchWithQuery).      
+:- begin_tests(fastmks_searchWithQuery).      
 
 %% Failure Tests
                                             
@@ -85,20 +85,20 @@ test(searchWithQuery_Fastmks_Negative_K, fail) :-
         reset_Model,
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,5, Data),
-        searchWithQuery(Data, 4, -2, _, _, _, _, 1.1).
+        fastmks_searchWithQuery(Data, 4, -2, _, _, _, _, 1.1).
 
 test(searchWithQuery_Fastmks_Negative_Base, fail) :-
         reset_Model,
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,5, Data),
-        searchWithQuery(Data, 4, 2, _, _, _, _, -1.1).
+        fastmks_searchWithQuery(Data, 4, 2, _, _, _, _, -1.1).
         
 
 %% Successful Tests
 
 test(searchWithQuery_Fastmks_New_Query) :-
         reset_Model,
-        searchWithQuery([3, 2, 0, 5, 1, 4, 0, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, 2, IndicesList, _, KernelsList, _, 1.1),
+        fastmks_searchWithQuery([3, 2, 0, 5, 1, 4, 0, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, 2, IndicesList, _, KernelsList, _, 1.1),
         print('Indices:\n'),
         print(IndicesList),
         print('Kernels:\n'),
@@ -108,32 +108,32 @@ test(searchWithQuery_Fastmks_Training_Data_Query) :-
         reset_Model,
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,5, Data),
-        searchWithQuery(Data, 4, 2, IndicesList, _, KernelsList, _, 1.1),
+        fastmks_searchWithQuery(Data, 4, 2, IndicesList, _, KernelsList, _, 1.1),
         print('Indices:\n'),
         print(IndicesList),
         print('Kernels:\n'),
         print(KernelsList).
 
-:- end_tests(searchWithQuery).
+:- end_tests(fastmks_searchWithQuery).
 
 
 %%
-%% TESTING predicate searchNoQuery/5
+%% TESTING predicate fastmks_searchNoQuery/5
 %%
-:- begin_tests(searchNoQuery).      
+:- begin_tests(fastmks_searchNoQuery).      
 
 %% Failure Tests
                                             
 test(searchNoQuery_Fastmks_Negative_K, fail) :-
         reset_Model,
-        searchNoQuery(-2, _, _, _, _).
+        fastmks_searchNoQuery(-2, _, _, _, _).
         
 
 %% Successful Tests
 
 test(testDescription) :-
         reset_Model,
-        searchNoQuery(2, IndicesList, _, KernelsList, _),
+        fastmks_searchNoQuery(2, IndicesList, _, KernelsList, _),
         print('Indices:\n'),
         print(IndicesList),
         print('Kernels:\n'),
@@ -141,13 +141,13 @@ test(testDescription) :-
 
 test(testDescription) :-
         reset_Model,
-        searchNoQuery(5, IndicesList, _, KernelsList, _),
+        fastmks_searchNoQuery(5, IndicesList, _, KernelsList, _),
         print('Indices:\n'),
         print(IndicesList),
         print('Kernels:\n'),
         print(KernelsList).
 
-:- end_tests(searchNoQuery).
+:- end_tests(fastmks_searchNoQuery).
 
 run_fastmks_tests :-
         run_tests.
diff --git a/src/methods/hoeffding_tree/hoeffding_tree.pl b/src/methods/hoeffding_tree/hoeffding_tree.pl
index 9996abf949b859e17cd512c35815ad195b001dab..e656d2c491275050d60769820d4c857446e6fce3 100644
--- a/src/methods/hoeffding_tree/hoeffding_tree.pl
+++ b/src/methods/hoeffding_tree/hoeffding_tree.pl
@@ -1,7 +1,7 @@
 
-:- module(hoeffding_tree, [     initAndBuildModel/12,
-                                classify/4,
-                                train/4]).
+:- module(hoeffding_tree, [     hoeffding_tree_initAndBuildModel/12,
+                                hoeffding_tree_classify/4,
+                                hoeffding_tree_train/4]).
 
 %% requirements of library(struct)
 :- load_files(library(str_decl),
@@ -38,7 +38,7 @@
 %%              Construct the Hoeffding tree with the given parameters and given training data.
 %%              The tree may be trained either in batch mode (which looks at all points before splitting, and propagates these points to the created children for further training), or in streaming mode, where each point is only considered once. (In general, batch mode will give better-performing trees, but will have higher memory and runtime costs for the same dataset.)
 %%
-initAndBuildModel(TreeType, DataList, DataRows, LabelsList, NumClasses, BatchTraining, SuccessProbability, MaxSamples, CheckInterval, MinSamples, Bins, ObservationsBeforeBinning) :-
+hoeffding_tree_initAndBuildModel(TreeType, DataList, DataRows, LabelsList, NumClasses, BatchTraining, SuccessProbability, MaxSamples, CheckInterval, MinSamples, Bins, ObservationsBeforeBinning) :-
         NumClasses >= 0,
         SuccessProbability >= 0,
         SuccessProbability =< 1,
@@ -69,7 +69,7 @@ foreign(initAndBuildModel, c, initAndBuildModelI(       +string,
 %%              Classify the given points, using this node and the entire (sub)tree beneath it.
 %%              The predicted labels for each point are returned, as well as an estimate of the probability that the prediction is correct for each point. This estimate is simply the MajorityProbability for the leaf that each point bins to.
 %%
-classify(TestList, TestRows, PredicList, ProbsList) :-
+hoeffding_tree_classify(TestList, TestRows, PredicList, ProbsList) :-
         convert_list_to_float_array(TestList, TestRows, array(Xsize, Xrownum, X)),
         classifyI(X, Xsize, Xrownum, Y, Ysize, Z, Zsize),
         convert_float_array_to_list(Y, Ysize, PredicList),
@@ -92,7 +92,7 @@ foreign(classify, c, classifyI( +pointer(float_array), +integer, +integer,
 %%              Train in streaming mode on the given dataset.
 %%              This takes one pass. Be sure that initAndBuildModel/14 has been called first!
 %%
-train(DataList, DataRows, LabelsList, BatchTraining) :-
+hoeffding_tree_train(DataList, DataRows, LabelsList, BatchTraining) :-
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)),
         convert_list_to_float_array(LabelsList, array(Ysize, Y)),
         trainI(X, Xsize, Xrownum, Y, Ysize, BatchTraining).
diff --git a/src/methods/hoeffding_tree/hoeffding_tree_test.pl b/src/methods/hoeffding_tree/hoeffding_tree_test.pl
index 75280e5e39bb15cfc5d64639beb8d3abf022a998..e03e532ae8fd2dd27f028e92df8bbeedebf81cd2 100644
--- a/src/methods/hoeffding_tree/hoeffding_tree_test.pl
+++ b/src/methods/hoeffding_tree/hoeffding_tree_test.pl
@@ -9,106 +9,106 @@
 reset_Model :-
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        initAndBuildModel(gini_binary, Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0, 0.95, 5000, 100, 100, 10, 100).
+        hoeffding_tree_initAndBuildModel(gini_binary, Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0, 0.95, 5000, 100, 100, 10, 100).
 
 %%
-%% TESTING predicate initAndBuildModel/12
+%% TESTING predicate hoeffding_tree_initAndBuildModel/12
 %%
-:- begin_tests(initAndBuildModel).      
+:- begin_tests(hoeffding_tree_initAndBuildModel).      
 
 %% Failure Tests
 
 test(hoeffding_Init_Classify_Befor_Init, [error(_,system_error('The model is not initialized!'))]) :-
-        classify([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, _, _).
+        hoeffding_tree_classify([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, _, _).
                                             
 test(hoeffding_Init_Wrong_TreeType, [error(domain_error('The given TreeType is unkown!' , wrongType), _)]) :-
-        initAndBuildModel(wrongType, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, 5000, 100, 100, 10, 100).
+        hoeffding_tree_initAndBuildModel(wrongType, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, 5000, 100, 100, 10, 100).
 
 test(hoeffding_Init_Negative_NumClass, fail) :-
-        initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], -2, 0, 0.95, 5000, 100, 100, 10, 100).
+        hoeffding_tree_initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], -2, 0, 0.95, 5000, 100, 100, 10, 100).
 
 test(hoeffding_Init_Bad_SuccessProbability, fail) :-
-        initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, -1.0, 5000, 100, 100, 10, 100),
-        initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 2.0, 5000, 100, 100, 10, 100).
+        hoeffding_tree_initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, -1.0, 5000, 100, 100, 10, 100),
+        hoeffding_tree_initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 2.0, 5000, 100, 100, 10, 100).
 
 test(hoeffding_Init_Negative_MaxSamples, fail) :-
-        initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, -5000, 100, 100, 10, 100).
+        hoeffding_tree_initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, -5000, 100, 100, 10, 100).
 
 test(hoeffding_Init_Negative_CheckInterval, fail) :-
-        initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, 5000, -100, 100, 10, 100).
+        hoeffding_tree_initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, 5000, -100, 100, 10, 100).
 
 test(hoeffding_Init_Negative_MinSamples, fail) :-
-        initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, 5000, 100, -100, 10, 100).
+        hoeffding_tree_initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, 5000, 100, -100, 10, 100).
 
 test(hoeffding_Init_Negative_Bins, fail) :-
-        initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, 5000, 100, 100, -10, 100).
+        hoeffding_tree_initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, 5000, 100, 100, -10, 100).
 
 test(hoeffding_Init_Negative_ObservationsBeforeBinning, fail) :-
-        initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, 5000, 100, 100, 10, -100).
+        hoeffding_tree_initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, 5000, 100, 100, 10, -100).
 
 
 test(hoeffding_Init_With_Wrong_Label_Dims1, [error(_,system_error('Labels Vector is too short or its values are incorrect: should fit into [0,numClasses)!'))]) :-
-        initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], 2, 0, 0.95, 5000, 100, 100, 10, 100).
+        hoeffding_tree_initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], 2, 0, 0.95, 5000, 100, 100, 10, 100).
 
 %% If the label vector is to long it seems to cause no problems
 test(hoeffding_Init_With_Wrong_Label_Dims2) :-
-        initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1,0,0,1], 2, 0, 0.95, 5000, 100, 100, 10, 100).
+        hoeffding_tree_initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1,0,0,1], 2, 0, 0.95, 5000, 100, 100, 10, 100).
 
 %% The same when the label values are out of range
 test(hoeffding_Init_With_Wrong_Label_Value) :-
-        initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,-1,0,-1], 2, 0, 0.95, 5000, 100, 100, 10, 100).
+        hoeffding_tree_initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,-1,0,-1], 2, 0, 0.95, 5000, 100, 100, 10, 100).
 
 test(hoeffding_Init_With_Too_Many_Label_Value, [error(_,system_error('The values of the Label have to start at 0 and be >= 0 and < the given numClass!'))]) :-
-        initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [1,1,0,2], 2, 0, 0.95, 5000, 100, 100, 10, 100).
+        hoeffding_tree_initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [1,1,0,2], 2, 0, 0.95, 5000, 100, 100, 10, 100).
         
 
 %% Successful Tests
 
 test(hoeffding_Init_GiniHoeffding_Direkt_Input) :-
-        initAndBuildModel(gini_hoeffding, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, 5000, 100, 100, 10, 100).
+        hoeffding_tree_initAndBuildModel(gini_hoeffding, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, 5000, 100, 100, 10, 100).
 
 test(hoeffding_Init_GiniHoeffding_CSV_Input) :-
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        initAndBuildModel(gini_hoeffding, Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0, 0.95, 5000, 100, 100, 10, 100).
+        hoeffding_tree_initAndBuildModel(gini_hoeffding, Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0, 0.95, 5000, 100, 100, 10, 100).
 
 test(hoeffding_Init_GiniBinary_Direkt_Input) :-
-        initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, 5000, 100, 100, 10, 100).
+        hoeffding_tree_initAndBuildModel(gini_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, 5000, 100, 100, 10, 100).
 
 test(hoeffding_Init_GiniBinary_CSV_Input) :-
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        initAndBuildModel(gini_binary, Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0, 0.95, 5000, 100, 100, 10, 100).
+        hoeffding_tree_initAndBuildModel(gini_binary, Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0, 0.95, 5000, 100, 100, 10, 100).
 
 test(hoeffding_Init_InfoHoeffding_Direkt_Input) :-
-        initAndBuildModel(info_hoeffding, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, 5000, 100, 100, 10, 100).
+        hoeffding_tree_initAndBuildModel(info_hoeffding, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, 5000, 100, 100, 10, 100).
 
 test(hoeffding_Init_InfoHoeffding_CSV_Input) :-
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        initAndBuildModel(info_hoeffding, Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0, 0.95, 5000, 100, 100, 10, 100).
+        hoeffding_tree_initAndBuildModel(info_hoeffding, Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0, 0.95, 5000, 100, 100, 10, 100).
 
 test(hoeffding_Init_InfoBinary_Direkt_Input) :-
-        initAndBuildModel(info_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, 5000, 100, 100, 10, 100).
+        hoeffding_tree_initAndBuildModel(info_binary, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.95, 5000, 100, 100, 10, 100).
 
 test(hoeffding_Init_InfoBinary_CSV_Input) :-
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        initAndBuildModel(info_binary, Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0, 0.95, 5000, 100, 100, 10, 100).
+        hoeffding_tree_initAndBuildModel(info_binary, Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0, 0.95, 5000, 100, 100, 10, 100).
 
-:- end_tests(initAndBuildModel).
+:- end_tests(hoeffding_tree_initAndBuildModel).
 
 
 %%
-%% TESTING predicate classify/4
+%% TESTING predicate hoeffding_tree_classify/4
 %%
-:- begin_tests(classify).      
+:- begin_tests(hoeffding_tree_classify).      
 
 %% Failure Tests
                                             
 test(hoeffding_Classify_Different_Dims_To_Train, [error(_,system_error('Labels Vector is too short or its values are incorrect: should fit into [0,numClasses)!'))]) :-
         reset_Model,
-        classify([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, PredicList, ProbsList),
+        hoeffding_tree_classify([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, PredicList, ProbsList),
         print('\nPredictions: '),
         print(PredicList),
         print('\nProbabilities: '),
@@ -120,8 +120,8 @@ test(hoeffding_Classify_Different_Dims_To_Train, [error(_,system_error('Labels V
 test(hoeffding_Classify_GiniHoeffding) :-
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        initAndBuildModel(gini_hoeffding, Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0, 0.95, 5000, 100, 100, 10, 100),
-        classify([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, PredicList, ProbsList),
+        hoeffding_tree_initAndBuildModel(gini_hoeffding, Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0, 0.95, 5000, 100, 100, 10, 100),
+        hoeffding_tree_classify([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, PredicList, ProbsList),
         print('\nPredictions: '),
         print(PredicList),
         print('\nProbabilities: '),
@@ -130,8 +130,8 @@ test(hoeffding_Classify_GiniHoeffding) :-
 test(hoeffding_Classify_GiniBinary) :-
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        initAndBuildModel(gini_binary, Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0, 0.95, 5000, 100, 100, 10, 100),
-        classify([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, PredicList, ProbsList),
+        hoeffding_tree_initAndBuildModel(gini_binary, Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0, 0.95, 5000, 100, 100, 10, 100),
+        hoeffding_tree_classify([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, PredicList, ProbsList),
         print('\nPredictions: '),
         print(PredicList),
         print('\nProbabilities: '),
@@ -140,8 +140,8 @@ test(hoeffding_Classify_GiniBinary) :-
 test(hoeffding_Classify_InfoHoeffding) :-
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        initAndBuildModel(info_hoeffding, Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0, 0.95, 5000, 100, 100, 10, 100),
-        classify([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, PredicList, ProbsList),
+        hoeffding_tree_initAndBuildModel(info_hoeffding, Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0, 0.95, 5000, 100, 100, 10, 100),
+        hoeffding_tree_classify([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, PredicList, ProbsList),
         print('\nPredictions: '),
         print(PredicList),
         print('\nProbabilities: '),
@@ -150,55 +150,55 @@ test(hoeffding_Classify_InfoHoeffding) :-
 test(hoeffding_Classify_InfoBinary) :-
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        initAndBuildModel(info_binary, Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0, 0.95, 5000, 100, 100, 10, 100),
-        classify([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, PredicList, ProbsList),
+        hoeffding_tree_initAndBuildModel(info_binary, Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0, 0.95, 5000, 100, 100, 10, 100),
+        hoeffding_tree_classify([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, PredicList, ProbsList),
         print('\nPredictions: '),
         print(PredicList),
         print('\nProbabilities: '),
         print(ProbsList).
 
-:- end_tests(classify).
+:- end_tests(hoeffding_tree_classify).
 
 
 %%
-%% TESTING predicate train/4
+%% TESTING predicate hoeffding_tree_train/4
 %%
-:- begin_tests(train).      
+:- begin_tests(hoeffding_tree_train).      
 
 %% Failure Tests
                                             
 test(hoeffding_Train_With_Wrong_Label_Dims1, [error(_,system_error('Labels Vector is too short or its values are incorrect: should fit into [0,numClasses)!'))]) :-
         reset_Model,
-        train([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, [0,1,0,0,0], 0).
+        hoeffding_tree_train([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, [0,1,0,0,0], 0).
 
 %% If the label vector is to long it seems to cause no problems
 test(hoeffding_Train_With_Wrong_Label_Dims2) :-
         reset_Model,
-        train([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, [0,1,0,1,0,0,1], 0).
+        hoeffding_tree_train([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, [0,1,0,1,0,0,1], 0).
 
 %% The same when the label values are out of range
 test(hoeffding_Train_With_Wrong_Label_Value) :-
         reset_Model,
-        train([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, [0,-1,0,-1], 0).
+        hoeffding_tree_train([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, [0,-1,0,-1], 0).
 
 test(hoeffding_Train_With_Too_Many_Label_Value, [error(_,system_error('The values of the Label have to start at 0 and be >= 0 and < the given numClass!'))]) :-
         reset_Model,
-        train([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, [1,1,0,2], 0).
+        hoeffding_tree_train([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, [1,1,0,2], 0).
         
 test(hoeffding_Train_Bad_Data_Dims) :-
         reset_Model,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 0).
+        hoeffding_tree_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 0).
 
 
 %% Successful Tests
 
 test(testDescription3) :-
         reset_Model,
-        train([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, [0,0,1,1,0], 1),
-        train([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, [0,0,1,1,0], 0).
+        hoeffding_tree_train([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, [0,0,1,1,0], 1),
+        hoeffding_tree_train([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, [0,0,1,1,0], 0).
 
-:- end_tests(train).
+:- end_tests(hoeffding_tree_train).
 
 run_hoeffding_tree_tests :-
-        run_tests(train).
+        run_tests.
 
diff --git a/src/methods/kde/kde.pl b/src/methods/kde/kde.pl
index cec58620e6fdebe2d8c0a712452d297041f4a29e..1a2951a148518564271662363069ab7ed238f812 100644
--- a/src/methods/kde/kde.pl
+++ b/src/methods/kde/kde.pl
@@ -1,7 +1,7 @@
 
-:- module(kde, [        initAndBuildModel/13,
-                        evaluateWithQuery/3,
-                        evaluateNoQuery/1]).
+:- module(kde, [        kde_initAndBuildModel/13,
+                        kde_evaluateWithQuery/3,
+                        kde_evaluateNoQuery/1]).
 
 %% requirements of library(struct)
 :- load_files(library(str_decl),
@@ -39,7 +39,7 @@
 %% --Description--
 %%              Build the KDE model with the given parameters and then trains it with the given reference data.
 %%
-initAndBuildModel(Bandwidth, RelError, AbsError, KernelType, TreeType, Algorithm, MonteCarlo, McProb, InitialSampleSize, MCEntryCoef, MCBreakCoef, DataList, DataRows) :-
+kde_initAndBuildModel(Bandwidth, RelError, AbsError, KernelType, TreeType, Algorithm, MonteCarlo, McProb, InitialSampleSize, MCEntryCoef, MCBreakCoef, DataList, DataRows) :-
         Bandwidth > 0.0,
         RelError >= 0.0, RelError =< 1.0,
         AbsError >= 0.0,
@@ -67,7 +67,7 @@ foreign(initAndBuildModel, c, initAndBuildModelI(+float32, +float32, +float32,
 %% --Description--
 %%              initAndBuildModel/14 has to be called before.
 %%
-evaluateWithQuery(QueryList, QueryRows, EstimationList) :-
+kde_evaluateWithQuery(QueryList, QueryRows, EstimationList) :-
         convert_list_to_float_array(QueryList, QueryRows, array(Xsize, Xrownum, X)),
         evaluateWithQueryI(X, Xsize, Xrownum, Y, Ysize),
         convert_float_array_to_list(Y, Ysize, EstimationList).
@@ -87,7 +87,7 @@ foreign(evaluateWithQuery, c, evaluateWithQueryI(+pointer(float_array), +integer
 %%              If possible, it returns normalized estimations.
 %%              initAndBuildModel/14 has to be called before.
 %%
-evaluateNoQuery(EstimationList) :-
+kde_evaluateNoQuery(EstimationList) :-
         evaluateNoQueryI(Y, Ysize),
         convert_float_array_to_list(Y, Ysize, EstimationList).
 
diff --git a/src/methods/kde/kde_test.pl b/src/methods/kde/kde_test.pl
index 6e8beba619b0e48e83bbc4663f07c48eba295e74..be92448dfdc98d9a4f8ed2b1ef277f7c8f485dbf 100644
--- a/src/methods/kde/kde_test.pl
+++ b/src/methods/kde/kde_test.pl
@@ -7,61 +7,61 @@
 :- use_module('../../helper_files/helper.pl').
 
 reset_Model :-
-        initAndBuildModel(1.0, 0.05, 0.0, gaussian, kd_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, gaussian, kd_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 
 %%
-%% TESTING predicate initAndBuildModel/13
+%% TESTING predicate kde_initAndBuildModel/13
 %%
-:- begin_tests(initAndBuildModel).      
+:- begin_tests(kde_initAndBuildModel).      
 
 %% Failure Tests
 
 test(kde_Init_Negative_Bandwidth, fail) :-
-        initAndBuildModel(-1.0, 0.05, 0.0, gaussian, kd_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(-1.0, 0.05, 0.0, gaussian, kd_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kde_Init_Bad_RelError, fail) :-
-        initAndBuildModel(1.0, -0.05, 0.0, gaussian, kd_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, -0.05, 0.0, gaussian, kd_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(1.0, 1.05, 0.0, gaussian, kd_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 1.05, 0.0, gaussian, kd_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kde_Init_Negative_AbsError, fail) :-
-        initAndBuildModel(1.0, 0.05, -1.0, gaussian, kd_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, -1.0, gaussian, kd_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kde_Init_Wrong_KernelType, [error(domain_error('The given KernelType is unknown!' , wrongType), _)]) :-
-        initAndBuildModel(1.0, 0.05, 0.0, wrongType, kd_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, wrongType, kd_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kde_Init_Wrong_TreeType, [error(domain_error('The given TreeType is unknown!' , wrongType), _)]) :-
-        initAndBuildModel(1.0, 0.05, 0.0, gaussian, wrongType, dual_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, gaussian, wrongType, dual_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kde_Init_Wrong_Algorithm, [error(domain_error('The given Algorithm is unknown!' , wrongType), _)]) :-
-        initAndBuildModel(1.0, 0.05, 0.0, gaussian, kd_tree, wrongType, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, gaussian, kd_tree, wrongType, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kde_Init_Bad_McProb, fail) :-
-        initAndBuildModel(1.0, 0.05, 0.0, gaussian, kd_tree, dual_tree, 0, -0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, gaussian, kd_tree, dual_tree, 0, -0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(1.0, 0.05, 0.0, gaussian, kd_tree, dual_tree, 0, 1.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, gaussian, kd_tree, dual_tree, 0, 1.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kde_Init_Negative_InitialSampleSize, fail) :-
-        initAndBuildModel(1.0, 0.05, 0.0, gaussian, kd_tree, dual_tree, 0, 0.95, -100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, gaussian, kd_tree, dual_tree, 0, 0.95, -100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kde_Init_Negative_MCEntryCoef, fail) :-
-        initAndBuildModel(1.0, 0.05, 0.0, gaussian, kd_tree, dual_tree, 0, 0.95, 100, 0.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, gaussian, kd_tree, dual_tree, 0, 0.95, 100, 0.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kde_Init_Bad_MCBreakCoef, fail) :-
-        initAndBuildModel(1.0, 0.05, 0.0, gaussian, kd_tree, dual_tree, 0, 0.95, 100, 3.0, -0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, gaussian, kd_tree, dual_tree, 0, 0.95, 100, 3.0, -0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(1.0, 0.05, 0.0, gaussian, kd_tree, dual_tree, 0, 0.95, 100, 3.0, 1.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, gaussian, kd_tree, dual_tree, 0, 0.95, 100, 3.0, 1.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
         
 
@@ -69,164 +69,164 @@ test(kde_Init_Bad_MCBreakCoef, fail) :-
 
 %% Gaussian Kernel
 test(kde_Init_Gaussian_KDTREE) :-
-        initAndBuildModel(1.0, 0.05, 0.0, gaussian, kd_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, gaussian, kd_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(1.0, 0.05, 0.0, gaussian, kd_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, gaussian, kd_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kde_Init_Gaussian_BALLTREE) :-
-        initAndBuildModel(1.0, 0.05, 0.0, gaussian, ball_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, gaussian, ball_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(1.0, 0.05, 0.0, gaussian, ball_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, gaussian, ball_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kde_Init_Gaussian_COVERTREE) :-
-        initAndBuildModel(1.0, 0.05, 0.0, gaussian, cover_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, gaussian, cover_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(1.0, 0.05, 0.0, gaussian, cover_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, gaussian, cover_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kde_Init_Gaussian_OCTREE) :-
-        initAndBuildModel(1.0, 0.05, 0.0, gaussian, octree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, gaussian, octree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(1.0, 0.05, 0.0, gaussian, octree, single_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, gaussian, octree, single_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kde_Init_Gaussian_RTREE) :-
-        initAndBuildModel(1.0, 0.05, 0.0, gaussian, r_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, gaussian, r_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(1.0, 0.05, 0.0, gaussian, r_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, gaussian, r_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 
 
 %% Epanechnikov Kernel
 test(kde_Init_Epanechnikov_KDTREE) :-
-        initAndBuildModel(1.0, 0.05, 0.0, epanechnikov, kd_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, epanechnikov, kd_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(1.0, 0.05, 0.0, epanechnikov, kd_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, epanechnikov, kd_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kde_Init_Epanechnikov_BALLTREE) :-
-        initAndBuildModel(1.0, 0.05, 0.0, epanechnikov, ball_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, epanechnikov, ball_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(1.0, 0.05, 0.0, epanechnikov, ball_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, epanechnikov, ball_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kde_Init_Epanechnikov_COVERTREE) :-
-        initAndBuildModel(1.0, 0.05, 0.0, epanechnikov, cover_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, epanechnikov, cover_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(1.0, 0.05, 0.0, epanechnikov, cover_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, epanechnikov, cover_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kde_Init_Epanechnikov_OCTREE) :-
-        initAndBuildModel(1.0, 0.05, 0.0, epanechnikov, octree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, epanechnikov, octree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(1.0, 0.05, 0.0, epanechnikov, octree, single_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, epanechnikov, octree, single_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kde_Init_Epanechnikov_RTREE) :-
-        initAndBuildModel(1.0, 0.05, 0.0, epanechnikov, r_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, epanechnikov, r_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(1.0, 0.05, 0.0, epanechnikov, r_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, epanechnikov, r_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 
 
 %% Laplacian Kernel
 test(kde_Init_Laplacian_KDTREE) :-
-        initAndBuildModel(1.0, 0.05, 0.0, laplacian, kd_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, laplacian, kd_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(1.0, 0.05, 0.0, laplacian, kd_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, laplacian, kd_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kde_Init_Laplacian_BALLTREE) :-
-        initAndBuildModel(1.0, 0.05, 0.0, laplacian, ball_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, laplacian, ball_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(1.0, 0.05, 0.0, laplacian, ball_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, laplacian, ball_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kde_Init_Laplacian_COVERTREE) :-
-        initAndBuildModel(1.0, 0.05, 0.0, laplacian, cover_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, laplacian, cover_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(1.0, 0.05, 0.0, laplacian, cover_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, laplacian, cover_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kde_Init_Laplacian_OCTREE) :-
-        initAndBuildModel(1.0, 0.05, 0.0, laplacian, octree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, laplacian, octree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(1.0, 0.05, 0.0, laplacian, octree, single_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, laplacian, octree, single_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kde_Init_Laplacian_RTREE) :-
-        initAndBuildModel(1.0, 0.05, 0.0, laplacian, r_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, laplacian, r_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(1.0, 0.05, 0.0, laplacian, r_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, laplacian, r_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 
 
 %% Spherical Kernel
 test(kde_Init_Spherical_KDTREE) :-
-        initAndBuildModel(1.0, 0.05, 0.0, spherical, kd_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, spherical, kd_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(1.0, 0.05, 0.0, spherical, kd_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, spherical, kd_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kde_Init_Spherical_BALLTREE) :-
-        initAndBuildModel(1.0, 0.05, 0.0, spherical, ball_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, spherical, ball_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(1.0, 0.05, 0.0, spherical, ball_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, spherical, ball_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kde_Init_Spherical_COVERTREE) :-
-        initAndBuildModel(1.0, 0.05, 0.0, spherical, cover_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, spherical, cover_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(1.0, 0.05, 0.0, spherical, cover_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, spherical, cover_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kde_Init_Spherical_OCTREE) :-
-        initAndBuildModel(1.0, 0.05, 0.0, spherical, octree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, spherical, octree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(1.0, 0.05, 0.0, spherical, octree, single_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, spherical, octree, single_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kde_Init_Spherical_RTREE) :-
-        initAndBuildModel(1.0, 0.05, 0.0, spherical, r_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, spherical, r_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(1.0, 0.05, 0.0, spherical, r_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, spherical, r_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 
 %% Triangular Kernel
 test(kde_Init_Triangular_KDTREE) :-
-        initAndBuildModel(1.0, 0.05, 0.0, triangular, kd_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, triangular, kd_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(1.0, 0.05, 0.0, triangular, kd_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, triangular, kd_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kde_Init_Triangular_BALLTREE) :-
-        initAndBuildModel(1.0, 0.05, 0.0, triangular, ball_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, triangular, ball_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(1.0, 0.05, 0.0, triangular, ball_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, triangular, ball_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kde_Init_Triangular_COVERTREE) :-
-        initAndBuildModel(1.0, 0.05, 0.0, triangular, cover_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, triangular, cover_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(1.0, 0.05, 0.0, triangular, cover_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, triangular, cover_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kde_Init_Triangular_OCTREE) :-
-        initAndBuildModel(1.0, 0.05, 0.0, triangular, octree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, triangular, octree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(1.0, 0.05, 0.0, triangular, octree, single_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, triangular, octree, single_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kde_Init_Triangular_RTREE) :-
-        initAndBuildModel(1.0, 0.05, 0.0, triangular, r_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, triangular, r_tree, dual_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(1.0, 0.05, 0.0, triangular, r_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
+        kde_initAndBuildModel(1.0, 0.05, 0.0, triangular, r_tree, single_tree, 0, 0.95, 100, 3.0, 0.4,
                           [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 
@@ -234,21 +234,21 @@ test(kde_Init_Triangular_RTREE) :-
 test(kde_Init_CSV_Input) :-
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        initAndBuildModel(2.2, 0.25, 0.5, gaussian, kd_tree, dual_tree, 1, 0.75, 50, 2.0, 0.2,
+        kde_initAndBuildModel(2.2, 0.25, 0.5, gaussian, kd_tree, dual_tree, 1, 0.75, 50, 2.0, 0.2,
                           Data, 4).
 
-:- end_tests(initAndBuildModel).
+:- end_tests(kde_initAndBuildModel).
 
 
 %%
-%% TESTING predicate evaluateWithQuery/3
+%% TESTING predicate kde_evaluateWithQuery/3
 %%
-:- begin_tests(evaluateWithQuery).      
+:- begin_tests(kde_evaluateWithQuery).      
 
 %% Failure Tests
 test(kde_EvalWithQuery_Wrong_Query_Dims, [error(_, system_error('cannot evaluate KDE model: querySet and referenceSet dimensions don\'t match'))]) :-
         reset_Model,
-        evaluateWithQuery([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, Estimation),
+        kde_evaluateWithQuery([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, Estimation),
         print('\nEstimation: '),
         print(Estimation).
         
@@ -257,26 +257,26 @@ test(kde_EvalWithQuery_Wrong_Query_Dims, [error(_, system_error('cannot evaluate
 
 test(kde_EvalWithQuery) :-
         reset_Model,
-        evaluateWithQuery([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5], 3, Estimation),
+        kde_evaluateWithQuery([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5], 3, Estimation),
         print('\nEstimation: '),
         print(Estimation).
 
 test(kde_EvalWithQuery_CSV_Input) :-
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        initAndBuildModel(2.2, 0.25, 0.5, gaussian, kd_tree, dual_tree, 1, 0.75, 50, 2.0, 0.2,
+        kde_initAndBuildModel(2.2, 0.25, 0.5, gaussian, kd_tree, dual_tree, 1, 0.75, 50, 2.0, 0.2,
                           Data, 4),
-        evaluateWithQuery([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, Estimation),
+        kde_evaluateWithQuery([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, Estimation),
         print('\nEstimation: '),
         print(Estimation).
 
-:- end_tests(evaluateWithQuery).
+:- end_tests(kde_evaluateWithQuery).
 
 
 %%
-%% TESTING predicate evaluateNoQuery/1
+%% TESTING predicate kde_evaluateNoQuery/1
 %%
-:- begin_tests(evaluateNoQuery).      
+:- begin_tests(kde_evaluateNoQuery).      
 
 %% Failure Tests
         
@@ -285,11 +285,11 @@ test(kde_EvalWithQuery_CSV_Input) :-
 
 test(kde_EvalNoQuery) :-
         reset_Model,
-        evaluateNoQuery(Estimation),
+        kde_evaluateNoQuery(Estimation),
         print('\nEstimation: '),
         print(Estimation).
 
-:- end_tests(evaluateNoQuery).
+:- end_tests(kde_evaluateNoQuery).
 
 run_kde_tests :-
         run_tests.
diff --git a/src/methods/kernel_pca/kernel_pca.pl b/src/methods/kernel_pca/kernel_pca.pl
index 01a1c62f7af1e806edf43c94a85b11b25728bb32..687b496162379b4df8efe63289f6e05f36e04e63 100644
--- a/src/methods/kernel_pca/kernel_pca.pl
+++ b/src/methods/kernel_pca/kernel_pca.pl
@@ -27,12 +27,12 @@
 %%              float32 bandwidth               needed by gaussian, epanechnikov, laplacian,
 %%              float32 scale                   needed by hyptan,
 %%              mat     data
+%%              int     newDimension
 %%
 %% --Output--
 %%              mat     transformedData,
 %%              vec     eigenValues,
-%%              mat     eigenVectores,
-%%              int     newDimension
+%%              mat     eigenVectors
 %%
 %% --Description--
 %%              This program performs Kernel Principal Components Analysis (KPCA) on the specified dataset with the specified kernel. This will transform the data onto the kernel principal components, and optionally reduce the dimensionality by ignoring the kernel principal components with the smallest eigenvalues.
diff --git a/src/methods/kfn/kfn.pl b/src/methods/kfn/kfn.pl
index 21fc2b8fc0e25fe37af85755db7f8f6286d73b31..8a5f21c360cb75e37df77150514840aaacc77f8b 100644
--- a/src/methods/kfn/kfn.pl
+++ b/src/methods/kfn/kfn.pl
@@ -1,7 +1,7 @@
 
-:- module(kfn, [        initAndBuildModel/7,
-                        searchWithQuery/7,
-                        searchNoQuery/5]).
+:- module(kfn, [        kfn_initAndBuildModel/7,
+                        kfn_searchWithQuery/7,
+                        kfn_searchNoQuery/5]).
 
 %% requirements of library(struct)
 :- load_files(library(str_decl),
@@ -33,7 +33,7 @@
 %% --Description--
 %%              Initialize the Model and build it.
 %%
-initAndBuildModel(TreeType, SearchMode, RandomBasis, LeafSize, Epsilon, ReferenceList, ReferenceRows) :-
+kfn_initAndBuildModel(TreeType, SearchMode, RandomBasis, LeafSize, Epsilon, ReferenceList, ReferenceRows) :-
         LeafSize >= 1,
         Epsilon >= 0,
         convert_list_to_float_array(ReferenceList, ReferenceRows, array(Xsize, Xrownum, X)),
@@ -56,7 +56,7 @@ foreign(initAndBuildModel, c, initAndBuildModelI(+string, +string,
 %% --Description--
 %%              Perform neighbor search on the queryset.
 %%
-searchWithQuery(QueryList, QueryRows, K, NeighborsList, YCols, DistancesList, ZCols) :-
+kfn_searchWithQuery(QueryList, QueryRows, K, NeighborsList, YCols, DistancesList, ZCols) :-
         K > 0,
         convert_list_to_float_array(QueryList, QueryRows, array(Xsize, Xrownum, X)),
         searchWithQueryI(X, Xsize, Xrownum, K, Y, YCols, YRows, Z, ZCols, ZRows),
@@ -80,7 +80,7 @@ foreign(searchWithQuery, c, searchWithQueryI(   +pointer(float_array), +integer,
 %% --Description--
 %%              Perform monochromatic neighbor search.
 %%
-searchNoQuery(K, NeighborsList, YCols, DistancesList, ZCols) :-
+kfn_searchNoQuery(K, NeighborsList, YCols, DistancesList, ZCols) :-
         K > 0,
         searchNoQueryI(K, Y, YCols, YRows, Z, ZCols, ZRows),
         convert_float_array_to_2d_list(Y, YCols, YRows, NeighborsList),
diff --git a/src/methods/kfn/kfn_test.pl b/src/methods/kfn/kfn_test.pl
index 035a06a882681846a8cc8c030d8eb9f78ba9086f..d15258f57d481ccfae96b5bee7514243f5432843 100644
--- a/src/methods/kfn/kfn_test.pl
+++ b/src/methods/kfn/kfn_test.pl
@@ -7,149 +7,149 @@
 :- use_module('../../helper_files/helper.pl').
 
 reset_Model :-
-        initAndBuildModel(kd, dual_tree, 0, 20, 0.0, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
+        kfn_initAndBuildModel(kd, dual_tree, 0, 20, 0.0, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 %%
-%% TESTING predicate initAndBuildModel/9
+%% TESTING predicate kfn_initAndBuildModel/7
 %%
-:- begin_tests(initAndBuildModel).      
+:- begin_tests(kfn_initAndBuildModel).      
 
 %% Failure Tests
                                             
 test(kfn_InitAndBuildModel_Wrong_TreeType_Input, [error(domain_error('The given TreeType is unknown!' , wrongInput), _)]) :-
-        initAndBuildModel(wrongInput, dual_tree, 0, 20, 0.0, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
+        kfn_initAndBuildModel(wrongInput, dual_tree, 0, 20, 0.0, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kfn_InitAndBuildModel_Wrong_SearchMode_Input, [error(domain_error('The given SearchMode is unknown!' , wrongInput), _)]) :-
-        initAndBuildModel(kd, wrongInput, 0, 20, 0.0, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
+        kfn_initAndBuildModel(kd, wrongInput, 0, 20, 0.0, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kfn_InitAndBuildModel_Negative_LeafSize, fail) :-
-        initAndBuildModel(kd, dual_tree, 0, 0, 0.0, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
+        kfn_initAndBuildModel(kd, dual_tree, 0, 0, 0.0, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kfn_InitAndBuildModel_Negative_Epsilon, fail) :-
-        initAndBuildModel(kd, dual_tree, 0, 20, -1.0, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
+        kfn_initAndBuildModel(kd, dual_tree, 0, 20, -1.0, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
         
 
 %% Successful Tests
 
 test(kfn_InitAndBuildModel_KD) :-
-        initAndBuildModel(kd, naive, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(kd, single_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(kd, dual_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(kd, greedy, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
+        kfn_initAndBuildModel(kd, naive, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        kfn_initAndBuildModel(kd, single_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        kfn_initAndBuildModel(kd, dual_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        kfn_initAndBuildModel(kd, greedy, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kfn_InitAndBuildModel_VP) :-
-        initAndBuildModel(vp, naive, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(vp, single_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(vp, dual_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(vp, greedy, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
+        kfn_initAndBuildModel(vp, naive, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        kfn_initAndBuildModel(vp, single_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        kfn_initAndBuildModel(vp, dual_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        kfn_initAndBuildModel(vp, greedy, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kfn_InitAndBuildModel_RP) :-
-        initAndBuildModel(rp, naive, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(rp, single_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(rp, dual_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(rp, greedy, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
+        kfn_initAndBuildModel(rp, naive, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        kfn_initAndBuildModel(rp, single_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        kfn_initAndBuildModel(rp, dual_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        kfn_initAndBuildModel(rp, greedy, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kfn_InitAndBuildModel_MAX_RP) :-
-        initAndBuildModel(max_rp, naive, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(max_rp, single_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(max_rp, dual_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(max_rp, greedy, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
+        kfn_initAndBuildModel(max_rp, naive, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        kfn_initAndBuildModel(max_rp, single_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        kfn_initAndBuildModel(max_rp, dual_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        kfn_initAndBuildModel(max_rp, greedy, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kfn_InitAndBuildModel_UB) :-
-        initAndBuildModel(ub, naive, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(ub, single_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(ub, dual_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(ub, greedy, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
+        kfn_initAndBuildModel(ub, naive, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        kfn_initAndBuildModel(ub, single_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        kfn_initAndBuildModel(ub, dual_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        kfn_initAndBuildModel(ub, greedy, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kfn_InitAndBuildModel_COVER) :-
-        initAndBuildModel(cover, naive, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(cover, single_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(cover, dual_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(cover, greedy, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
+        kfn_initAndBuildModel(cover, naive, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        kfn_initAndBuildModel(cover, single_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        kfn_initAndBuildModel(cover, dual_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        kfn_initAndBuildModel(cover, greedy, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kfn_InitAndBuildModel_R) :-
-        initAndBuildModel(r, naive, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(r, single_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(r, dual_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(r, greedy, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
+        kfn_initAndBuildModel(r, naive, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        kfn_initAndBuildModel(r, single_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        kfn_initAndBuildModel(r, dual_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        kfn_initAndBuildModel(r, greedy, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kfn_InitAndBuildModel_R_STAR) :-
-        initAndBuildModel(r_star, naive, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(r_star, single_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(r_star, dual_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(r_star, greedy, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
+        kfn_initAndBuildModel(r_star, naive, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        kfn_initAndBuildModel(r_star, single_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        kfn_initAndBuildModel(r_star, dual_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        kfn_initAndBuildModel(r_star, greedy, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kfn_InitAndBuildModel_X) :-
-        initAndBuildModel(x, naive, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(x, single_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(x, dual_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(x, greedy, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
+        kfn_initAndBuildModel(x, naive, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        kfn_initAndBuildModel(x, single_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        kfn_initAndBuildModel(x, dual_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        kfn_initAndBuildModel(x, greedy, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kfn_InitAndBuildModel_BALL) :-
-        initAndBuildModel(ball, naive, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(ball, single_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(ball, dual_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(ball, greedy, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
+        kfn_initAndBuildModel(ball, naive, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        kfn_initAndBuildModel(ball, single_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        kfn_initAndBuildModel(ball, dual_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        kfn_initAndBuildModel(ball, greedy, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kfn_InitAndBuildModel_Hilbert_R) :-
-        initAndBuildModel(hilbert_r, naive, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(hilbert_r, single_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(hilbert_r, dual_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(hilbert_r, greedy, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
+        kfn_initAndBuildModel(hilbert_r, naive, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        kfn_initAndBuildModel(hilbert_r, single_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        kfn_initAndBuildModel(hilbert_r, dual_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        kfn_initAndBuildModel(hilbert_r, greedy, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kfn_InitAndBuildModel_R_Plus) :-
-        initAndBuildModel(r_plus, naive, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(r_plus, single_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(r_plus, dual_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(r_plus, greedy, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
+        kfn_initAndBuildModel(r_plus, naive, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        kfn_initAndBuildModel(r_plus, single_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        kfn_initAndBuildModel(r_plus, dual_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        kfn_initAndBuildModel(r_plus, greedy, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kfn_InitAndBuildModel_R_Plus_Plus) :-
-        initAndBuildModel(r_plus_plus, naive, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(r_plus_plus, single_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(r_plus_plus, dual_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(r_plus_plus, greedy, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
+        kfn_initAndBuildModel(r_plus_plus, naive, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        kfn_initAndBuildModel(r_plus_plus, single_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        kfn_initAndBuildModel(r_plus_plus, dual_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        kfn_initAndBuildModel(r_plus_plus, greedy, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kfn_InitAndBuildModel_OCT) :-
-        initAndBuildModel(oct, naive, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(oct, single_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(oct, dual_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(oct, greedy, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
+        kfn_initAndBuildModel(oct, naive, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        kfn_initAndBuildModel(oct, single_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        kfn_initAndBuildModel(oct, dual_tree, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        kfn_initAndBuildModel(oct, greedy, 0, 20, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(kfn_InitAndBuildModel_CSV_Input) :-
         reset_Model,
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        initAndBuildModel(kd, dual_tree, 1, 20, 0.005, Data, 4).
+        kfn_initAndBuildModel(kd, dual_tree, 1, 20, 0.005, Data, 4).
 
-:- end_tests(initAndBuildModel).
+:- end_tests(kfn_initAndBuildModel).
 
 
 
 %%
-%% TESTING predicate searchWithQuery/7
+%% TESTING predicate kfn_searchWithQuery/7
 %%
-:- begin_tests(searchWithQuery).      
+:- begin_tests(kfn_searchWithQuery).      
 
 %% Failure Tests
                                    
 test(kfn_SearchWithQuery_Wrong_Query_Dims, [error(_,system_error('Queryset has Dim(4) but the Referenceset has Dim(3)'))]) :-
         reset_Model,
-        searchWithQuery([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, 2, _, _, _, _).
+        kfn_searchWithQuery([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, 2, _, _, _, _).
 
 test(kfn_SearchWithQuery_Negative_K, fail) :-
         reset_Model,
-        searchWithQuery([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, -1, _, _, _, _).
+        kfn_searchWithQuery([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, -1, _, _, _, _).
 
 test(kfn_SearchWithQuery_Too_Large_K, [error(_,system_error('Requested value of k (10) is greater than the number of points in the reference set (4)'))]) :-
         reset_Model,
-        searchWithQuery([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, 10, _, _, _, _).
+        kfn_searchWithQuery([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, 10, _, _, _, _).
         
 
 %% Successful Tests
 
 test(kfn_SearchWithQuery_Normal) :-
         reset_Model,
-        searchWithQuery([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, 2, NeighborsList, _, DistancesList, _),
+        kfn_searchWithQuery([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, 2, NeighborsList, _, DistancesList, _),
         print('\nNeighbors: '),
         print(NeighborsList),
         print('\nDistances: '),
@@ -158,38 +158,38 @@ test(kfn_SearchWithQuery_Normal) :-
 test(kfn_SearchWithQuery_CSV_Input) :-
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        initAndBuildModel(kd, dual_tree, 0, 20, 0.0, Data, 4),
-        searchWithQuery([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, 7, NeighborsList, _, DistancesList, _),
+        kfn_initAndBuildModel(kd, dual_tree, 0, 20, 0.0, Data, 4),
+        kfn_searchWithQuery([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, 7, NeighborsList, _, DistancesList, _),
         print('\nNeighbors: '),
         print(NeighborsList),
         print('\nDistances: '),
         print(DistancesList).
 
-:- end_tests(searchWithQuery).
+:- end_tests(kfn_searchWithQuery).
 
 
 
 %%
-%% TESTING predicate searchNoQuery/10
+%% TESTING predicate kfn_searchNoQuery/5
 %%
-:- begin_tests(searchNoQuery).      
+:- begin_tests(kfn_searchNoQuery).      
 
 %% Failure Tests
                                             
 test(kfn_SearchNoQuery_Negative_K, fail) :-
         reset_Model,
-        searchNoQuery(-1, _, _, _, _).
+        kfn_searchNoQuery(-1, _, _, _, _).
 
 test(kfn_SearchNoQuery_Too_Large_K, [error(_,system_error('Requested value of k (15) is greater than the number of points in the reference set (4)'))]) :-
         reset_Model,
-        searchNoQuery(15, _, _, _, _).
+        kfn_searchNoQuery(15, _, _, _, _).
         
 
 %% Successful Tests
 
 test(kfn_SearchNoQuery_Normal) :-
         reset_Model,
-        searchNoQuery(2, NeighborsList, _, DistancesList, _),
+        kfn_searchNoQuery(2, NeighborsList, _, DistancesList, _),
         print('\nNeighbors: '),
         print(NeighborsList),
         print('\nDistances: '),
@@ -198,14 +198,14 @@ test(kfn_SearchNoQuery_Normal) :-
 test(kfn_SearchNoQuery_CSV_Input) :-
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        initAndBuildModel(kd, dual_tree, 0, 20, 0.0, Data, 4),
-        searchNoQuery(7, NeighborsList, _, DistancesList, _),
+        kfn_initAndBuildModel(kd, dual_tree, 0, 20, 0.0, Data, 4),
+        kfn_searchNoQuery(7, NeighborsList, _, DistancesList, _),
         print('\nNeighbors: '),
         print(NeighborsList),
         print('\nDistances: '),
         print(DistancesList).
 
-:- end_tests(searchNoQuery).
+:- end_tests(kfn_searchNoQuery).
 
 run_kfn_tests :-
         run_tests.
diff --git a/src/methods/knn/knn.pl b/src/methods/knn/knn.pl
index f4d0cac42953dfe2d9d4f1daa34c1c1037ca8ff7..c410a6c2dfe72ff869ce68790d4699d605351545 100644
--- a/src/methods/knn/knn.pl
+++ b/src/methods/knn/knn.pl
@@ -1,7 +1,7 @@
 
-:- module(knn, [        initAndBuildModel/9,
-                        searchWithQuery/7,
-                        searchNoQuery/5]).
+:- module(knn, [        knn_initAndBuildModel/9,
+                        knn_searchWithQuery/7,
+                        knn_searchNoQuery/5]).
 
 %% requirements of library(struct)
 :- load_files(library(str_decl),
@@ -34,7 +34,7 @@
 %% --Description--
 %%              Initialize the Model and build it.
 %%
-initAndBuildModel(TreeType, SearchMode, RandomBasis, LeafSize, Tau, Rho, Epsilon, ReferenceList, ReferenceRows) :-
+knn_initAndBuildModel(TreeType, SearchMode, RandomBasis, LeafSize, Tau, Rho, Epsilon, ReferenceList, ReferenceRows) :-
         LeafSize >= 1,
         Tau >= 0,
         Rho >= 0,
@@ -60,7 +60,7 @@ foreign(initAndBuildModel, c, initAndBuildModelI(       +string, +string,
 %% --Description--
 %%              Perform neighbor search on the queryset.
 %%
-searchWithQuery(QueryList, QueryRows, K, NeighborsList, YCols, DistancesList, ZCols) :-
+knn_searchWithQuery(QueryList, QueryRows, K, NeighborsList, YCols, DistancesList, ZCols) :-
         K > 0,
         convert_list_to_float_array(QueryList, QueryRows, array(Xsize, Xrownum, X)),
         searchWithQueryI(X, Xsize, Xrownum, K, Y, YCols, YRows, Z, ZCols, ZRows),
@@ -83,7 +83,7 @@ foreign(searchWithQuery, c, searchWithQueryI(   +pointer(float_array), +integer,
 %% --Description--
 %%              Perform monochromatic neighbor search.
 %%
-searchNoQuery(K, NeighborsList, YCols, DistancesList, ZCols) :-
+knn_searchNoQuery(K, NeighborsList, YCols, DistancesList, ZCols) :-
         K > 0,
         searchNoQueryI(K, Y, YCols, YRows, Z, ZCols, ZRows),
         convert_float_array_to_2d_list(Y, YCols, YRows, NeighborsList),
diff --git a/src/methods/knn/knn_test.pl b/src/methods/knn/knn_test.pl
index 6b377527f8e6c9e6833c0981a538f37eb9c07b72..9372ef81ae2b2c9c371d1331bf869f733cfa752d 100644
--- a/src/methods/knn/knn_test.pl
+++ b/src/methods/knn/knn_test.pl
@@ -7,162 +7,162 @@
 :- use_module('../../helper_files/helper.pl').
 
 reset_Model :-
-        initAndBuildModel(kd, dual_tree, 0, 20, 0.7, 0.0, 0.0, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
+        knn_initAndBuildModel(kd, dual_tree, 0, 20, 0.7, 0.0, 0.0, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 %%
-%% TESTING predicate initAndBuildModel/9
+%% TESTING predicate knn_initAndBuildModel/9
 %%
-:- begin_tests(initAndBuildModel).      
+:- begin_tests(knn_initAndBuildModel).      
 
 %% Failure Tests
                                             
 test(knn_InitAndBuildModel_Wrong_TreeType_Input, [error(domain_error('The given TreeType is unknown!' , wrongInput), _)]) :-
-        initAndBuildModel(wrongInput, dual_tree, 0, 20, 0.7, 0.0, 0.0, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
+        knn_initAndBuildModel(wrongInput, dual_tree, 0, 20, 0.7, 0.0, 0.0, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(knn_InitAndBuildModel_Wrong_SearchMode_Input, [error(domain_error('The given SearchMode is unknown!' , wrongInput), _)]) :-
-        initAndBuildModel(kd, wrongInput, 0, 20, 0.7, 0.0, 0.0, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
+        knn_initAndBuildModel(kd, wrongInput, 0, 20, 0.7, 0.0, 0.0, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(knn_InitAndBuildModel_Negative_LeafSize, fail) :-
-        initAndBuildModel(kd, dual_tree, 0, 0, 0.7, 0.0, 0.0, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
+        knn_initAndBuildModel(kd, dual_tree, 0, 0, 0.7, 0.0, 0.0, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(knn_InitAndBuildModel_Negative_Tau, fail) :-
-        initAndBuildModel(kd, dual_tree, 0, 20, -0.7, 0.0, 0.0, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
+        knn_initAndBuildModel(kd, dual_tree, 0, 20, -0.7, 0.0, 0.0, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(knn_InitAndBuildModel_Bad_Rho_Input, fail) :-
-        initAndBuildModel(kd, dual_tree, 0, 20, 0.7, -0.5, 0.0, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(kd, dual_tree, 0, 20, 0.7, 1.5, 0.0, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
+        knn_initAndBuildModel(kd, dual_tree, 0, 20, 0.7, -0.5, 0.0, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(kd, dual_tree, 0, 20, 0.7, 1.5, 0.0, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(knn_InitAndBuildModel_Negative_Epsilon, fail) :-
-        initAndBuildModel(kd, dual_tree, 0, 20, 0.7, 0.0, -1.0, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
+        knn_initAndBuildModel(kd, dual_tree, 0, 20, 0.7, 0.0, -1.0, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
         
 
 %% Successful Tests
 
 test(knn_InitAndBuildModel_KD) :-
-        initAndBuildModel(kd, naive, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(kd, single_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(kd, dual_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(kd, greedy, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
+        knn_initAndBuildModel(kd, naive, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(kd, single_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(kd, dual_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(kd, greedy, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(knn_InitAndBuildModel_VP) :-
-        initAndBuildModel(vp, naive, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(vp, single_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(vp, dual_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(vp, greedy, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
+        knn_initAndBuildModel(vp, naive, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(vp, single_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(vp, dual_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(vp, greedy, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(knn_InitAndBuildModel_RP) :-
-        initAndBuildModel(rp, naive, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(rp, single_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(rp, dual_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(rp, greedy, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
+        knn_initAndBuildModel(rp, naive, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(rp, single_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(rp, dual_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(rp, greedy, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(knn_InitAndBuildModel_MAX_RP) :-
-        initAndBuildModel(max_rp, naive, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(max_rp, single_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(max_rp, dual_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(max_rp, greedy, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
+        knn_initAndBuildModel(max_rp, naive, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(max_rp, single_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(max_rp, dual_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(max_rp, greedy, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(knn_InitAndBuildModel_UB) :-
-        initAndBuildModel(ub, naive, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(ub, single_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(ub, dual_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(ub, greedy, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
+        knn_initAndBuildModel(ub, naive, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(ub, single_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(ub, dual_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(ub, greedy, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(knn_InitAndBuildModel_COVER) :-
-        initAndBuildModel(cover, naive, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(cover, single_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(cover, dual_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(cover, greedy, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
+        knn_initAndBuildModel(cover, naive, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(cover, single_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(cover, dual_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(cover, greedy, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(knn_InitAndBuildModel_R) :-
-        initAndBuildModel(r, naive, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(r, single_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(r, dual_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(r, greedy, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
+        knn_initAndBuildModel(r, naive, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(r, single_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(r, dual_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(r, greedy, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(knn_InitAndBuildModel_R_STAR) :-
-        initAndBuildModel(r_star, naive, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(r_star, single_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(r_star, dual_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(r_star, greedy, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
+        knn_initAndBuildModel(r_star, naive, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(r_star, single_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(r_star, dual_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(r_star, greedy, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(knn_InitAndBuildModel_X) :-
-        initAndBuildModel(x, naive, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(x, single_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(x, dual_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(x, greedy, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
+        knn_initAndBuildModel(x, naive, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(x, single_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(x, dual_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(x, greedy, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(knn_InitAndBuildModel_BALL) :-
-        initAndBuildModel(ball, naive, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(ball, single_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(ball, dual_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(ball, greedy, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
+        knn_initAndBuildModel(ball, naive, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(ball, single_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(ball, dual_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(ball, greedy, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(knn_InitAndBuildModel_Hilbert_R) :-
-        initAndBuildModel(hilbert_r, naive, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(hilbert_r, single_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(hilbert_r, dual_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(hilbert_r, greedy, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
+        knn_initAndBuildModel(hilbert_r, naive, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(hilbert_r, single_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(hilbert_r, dual_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(hilbert_r, greedy, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(knn_InitAndBuildModel_R_Plus) :-
-        initAndBuildModel(r_plus, naive, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(r_plus, single_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(r_plus, dual_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(r_plus, greedy, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
+        knn_initAndBuildModel(r_plus, naive, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(r_plus, single_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(r_plus, dual_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(r_plus, greedy, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(knn_InitAndBuildModel_R_Plus_Plus) :-
-        initAndBuildModel(r_plus_plus, naive, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(r_plus_plus, single_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(r_plus_plus, dual_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(r_plus_plus, greedy, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
+        knn_initAndBuildModel(r_plus_plus, naive, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(r_plus_plus, single_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(r_plus_plus, dual_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(r_plus_plus, greedy, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(knn_InitAndBuildModel_SPILL) :-
-        initAndBuildModel(spill, naive, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(spill, single_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(spill, dual_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(spill, greedy, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
+        knn_initAndBuildModel(spill, naive, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(spill, single_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(spill, dual_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(spill, greedy, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(knn_InitAndBuildModel_OCT) :-
-        initAndBuildModel(oct, naive, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(oct, single_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(oct, dual_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
-        initAndBuildModel(oct, greedy, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
+        knn_initAndBuildModel(oct, naive, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(oct, single_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(oct, dual_tree, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3),
+        knn_initAndBuildModel(oct, greedy, 0, 20, 0.7, 0.01, 0.005, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3).
 
 test(knn_InitAndBuildModel_CSV_Input) :-
         reset_Model,
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        initAndBuildModel(kd, dual_tree, 1, 20, 0.7, 0.01, 0.005, Data, 4).
+        knn_initAndBuildModel(kd, dual_tree, 1, 20, 0.7, 0.01, 0.005, Data, 4).
 
-:- end_tests(initAndBuildModel).
+:- end_tests(knn_initAndBuildModel).
 
 
 
 %%
-%% TESTING predicate searchWithQuery/7
+%% TESTING predicate knn_searchWithQuery/7
 %%
-:- begin_tests(searchWithQuery).      
+:- begin_tests(knn_searchWithQuery).      
 
 %% Failure Tests
                                    
 test(knn_SearchWithQuery_Wrong_Query_Dims, [error(_,system_error('Queryset has Dim(4) but the Referenceset has Dim(3)'))]) :-
         reset_Model,
-        searchWithQuery([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, 2, _, _, _, _).
+        knn_searchWithQuery([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, 2, _, _, _, _).
 
 test(knn_SearchWithQuery_Negative_K, fail) :-
         reset_Model,
-        searchWithQuery([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, -1, _, _, _, _).
+        knn_searchWithQuery([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, -1, _, _, _, _).
 
 test(knn_SearchWithQuery_Too_Large_K, [error(_,system_error('Requested value of k (10) is greater than the number of points in the reference set (4)'))]) :-
         reset_Model,
-        searchWithQuery([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, 10, _, _, _, _).
+        knn_searchWithQuery([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, 10, _, _, _, _).
         
 
 %% Successful Tests
 
 test(knn_SearchWithQuery_Normal) :-
         reset_Model,
-        searchWithQuery([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, 2, NeighborsList, _, DistancesList, _),
+        knn_searchWithQuery([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, 2, NeighborsList, _, DistancesList, _),
         print('\nNeighbors: '),
         print(NeighborsList),
         print('\nDistances: '),
@@ -171,38 +171,38 @@ test(knn_SearchWithQuery_Normal) :-
 test(knn_SearchWithQuery_CSV_Input) :-
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        initAndBuildModel(kd, dual_tree, 0, 20, 0.7, 0.0, 0.0, Data, 4),
-        searchWithQuery([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, 7, NeighborsList, _, DistancesList, _),
+        knn_initAndBuildModel(kd, dual_tree, 0, 20, 0.7, 0.0, 0.0, Data, 4),
+        knn_searchWithQuery([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2, 5, 5, 0, 2], 4, 7, NeighborsList, _, DistancesList, _),
         print('\nNeighbors: '),
         print(NeighborsList),
         print('\nDistances: '),
         print(DistancesList).
 
-:- end_tests(searchWithQuery).
+:- end_tests(knn_searchWithQuery).
 
 
 
 %%
-%% TESTING predicate searchNoQuery/10
+%% TESTING predicate knn_searchNoQuery/5
 %%
-:- begin_tests(searchNoQuery).      
+:- begin_tests(knn_searchNoQuery).      
 
 %% Failure Tests
                                             
 test(knn_SearchNoQuery_Negative_K, fail) :-
         reset_Model,
-        searchNoQuery(-1, _, _, _, _).
+        knn_searchNoQuery(-1, _, _, _, _).
 
 test(knn_SearchNoQuery_Too_Large_K, [error(_,system_error('Requested value of k (15) is greater than the number of points in the reference set (4)'))]) :-
         reset_Model,
-        searchNoQuery(15, _, _, _, _).
+        knn_searchNoQuery(15, _, _, _, _).
         
 
 %% Successful Tests
 
 test(knn_SearchNoQuery_Normal) :-
         reset_Model,
-        searchNoQuery(2, NeighborsList, _, DistancesList, _),
+        knn_searchNoQuery(2, NeighborsList, _, DistancesList, _),
         print('\nNeighbors: '),
         print(NeighborsList),
         print('\nDistances: '),
@@ -211,14 +211,14 @@ test(knn_SearchNoQuery_Normal) :-
 test(knn_SearchNoQuery_CSV_Input) :-
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        initAndBuildModel(kd, dual_tree, 0, 20, 0.7, 0.0, 0.0, Data, 4),
-        searchNoQuery(7, NeighborsList, _, DistancesList, _),
+        knn_initAndBuildModel(kd, dual_tree, 0, 20, 0.7, 0.0, 0.0, Data, 4),
+        knn_searchNoQuery(7, NeighborsList, _, DistancesList, _),
         print('\nNeighbors: '),
         print(NeighborsList),
         print('\nDistances: '),
         print(DistancesList).
 
-:- end_tests(searchNoQuery).
+:- end_tests(knn_searchNoQuery).
 
 run_knn_tests :-
         run_tests.
diff --git a/src/methods/lars/lars.pl b/src/methods/lars/lars.pl
index a7449f565b9d1221410245c1f69749cc9e7ddd7f..d6ac26d39eae801a7cdd46b3fc6d29b3945117c0 100644
--- a/src/methods/lars/lars.pl
+++ b/src/methods/lars/lars.pl
@@ -1,16 +1,16 @@
 
-:- module(lars, [       initModelNoDataNoGram/4, 
-                        initModelNoDataWithGram/6, 
-                        initModelWithDataNoGram/8, 
-                        initModelWithDataWithGram/10, 
-                        activeSet/1, 
-                        beta/1, 
-                        betaPath/2, 
-                        computeError/5, 
-                        lambdaPath/1, 
-                        matUtriCholFactor/2, 
-                        predict/4, 
-                        train/6]).
+:- module(lars, [       lars_initModelNoDataNoGram/4, 
+                        lars_initModelNoDataWithGram/6, 
+                        lars_initModelWithDataNoGram/8, 
+                        lars_initModelWithDataWithGram/10, 
+                        lars_activeSet/1, 
+                        lars_beta/1, 
+                        lars_betaPath/2, 
+                        lars_computeError/5, 
+                        lars_lambdaPath/1, 
+                        lars_matUtriCholFactor/2, 
+                        lars_predict/4, 
+                        lars_train/6]).
 
 :- load_files(library(str_decl),
                 [when(compile_time), if(changed)]).
@@ -38,7 +38,7 @@
 %% --Description--
 %%              Only initialize the LARS model.
 %%
-initModelNoDataNoGram(UseCholesky, Lambda1, Lambda2, Tolerance) :-
+lars_initModelNoDataNoGram(UseCholesky, Lambda1, Lambda2, Tolerance) :-
         initModelNoDataNoGramI(UseCholesky, Lambda1, Lambda2, Tolerance).
 
 foreign(initModelNoDataNoGram, c, initModelNoDataNoGramI(+integer, 
@@ -58,7 +58,7 @@ foreign(initModelNoDataNoGram, c, initModelNoDataNoGramI(+integer,
 %% --Description--
 %%              Initialize LARS model, and pass in a precalculated Gram matrix but dont train the model.
 %%
-initModelNoDataWithGram(UseCholesky, GramList, GramRows, Lambda1, Lambda2, Tolerance) :-
+lars_initModelNoDataWithGram(UseCholesky, GramList, GramRows, Lambda1, Lambda2, Tolerance) :-
         convert_list_to_float_array(GramList, GramRows, array(Zsize, Zrownum, Z)),
         initModelNoDataWithGramI(UseCholesky, Z, Zsize, Zrownum, Lambda1, Lambda2, Tolerance).
 
@@ -81,7 +81,7 @@ foreign(initModelNoDataWithGram, c, initModelNoDataWithGramI(    +integer,
 %% --Description--
 %%              Initialize LARS model, and train the model.
 %%
-initModelWithDataNoGram(DataList, DataRows, ResponsesList, TransposeData, UseCholesky, Lambda1, Lambda2, Tolerance) :-
+lars_initModelWithDataNoGram(DataList, DataRows, ResponsesList, TransposeData, UseCholesky, Lambda1, Lambda2, Tolerance) :-
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)),
         convert_list_to_float_array(ResponsesList, array(Ysize, Y)),
         initModelWithDataNoGramI(X, Xsize, Xrownum, Y, Ysize, TransposeData, UseCholesky, Lambda1, Lambda2, Tolerance).
@@ -109,7 +109,7 @@ foreign(initModelWithDataNoGram, c, initModelWithDataNoGramI(   +pointer(float_a
 %% --Description--
 %%              Initialize LARS model, pass in a precalculated Gram matrix and train the model.
 %%
-initModelWithDataWithGram(DataList, DataRows, ResponsesList, TransposeData, UseCholesky, GramList, GramRows, Lambda1, Lambda2, Tolerance) :-
+lars_initModelWithDataWithGram(DataList, DataRows, ResponsesList, TransposeData, UseCholesky, GramList, GramRows, Lambda1, Lambda2, Tolerance) :-
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)),
         convert_list_to_float_array(ResponsesList, array(Ysize, Y)),
         convert_list_to_float_array(GramList, GramRows, array(Zsize, Zrownum, Z)),
@@ -132,7 +132,7 @@ foreign(initModelWithDataWithGram, c, initModelWithDataWithGramI(+pointer(float_
 %% --Description--
 %%              Get the set of active dimensions
 %%
-activeSet(ActiveSetList) :-
+lars_activeSet(ActiveSetList) :-
         activeSetI(Y, Ysize),
         convert_float_array_to_list(Y, Ysize, ActiveSetList).
 
@@ -147,7 +147,7 @@ foreign(activeSet, c, activeSetI(-pointer(float_array), -integer)).
 %% --Description--
 %%              Get the solution coefficients.
 %%
-beta(BetaList) :-
+lars_beta(BetaList) :-
         betaI(Y, Ysize),
         convert_float_array_to_list(Y, Ysize, BetaList).
 
@@ -162,7 +162,7 @@ foreign(beta, c, betaI(-pointer(float_array), -integer)).
 %% --Description--
 %%              Get the set of coefficients after each iteration. The solution is the last element.
 %%
-betaPath(BetaList, XCols) :-
+lars_betaPath(BetaList, XCols) :-
         betaPathI(X, XCols, XRows),
         convert_float_array_to_2d_list(X, XCols, XRows, BetaList).
 
@@ -180,7 +180,7 @@ foreign(betaPath, c, betaPathI(-pointer(float_array), -integer, -integer)).
 %% --Description--
 %%              Compute cost error of the given data matrix using the currently-trained LARS model.Only ||y-beta*X||2 is used to calculate cost error.
 %%
-computeError(DataList, DataRows, ResponsesList, RowMajor, Error) :-
+lars_computeError(DataList, DataRows, ResponsesList, RowMajor, Error) :-
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)),
         convert_list_to_float_array(ResponsesList, array(Ysize, Y)),
         computeErrorI(X, Xsize, Xrownum, Y, Ysize, RowMajor, Error).
@@ -199,7 +199,7 @@ foreign(computeError, c, computeErrorI( +pointer(float_array), +integer, +intege
 %% --Description--
 %%              Get the set of values for lambda1 after each iteration; the solution is the last element.
 %%
-lambdaPath(LambdaPathList) :-
+lars_lambdaPath(LambdaPathList) :-
         lambdaPathI(Y, Ysize),
         convert_float_array_to_list(Y, Ysize, LambdaPathList).
 
@@ -214,7 +214,7 @@ foreign(lambdaPath, c, lambdaPathI(-pointer(float_array), -integer)).
 %% --Description--
 %%              Get the upper triangular cholesky factor.
 %%
-matUtriCholFactor(FactorList, XCols) :-
+lars_matUtriCholFactor(FactorList, XCols) :-
         matUtriCholFactorI(X, XCols, XRows),
         convert_float_array_to_2d_list(X, XCols, XRows, FactorList).
 
@@ -231,7 +231,7 @@ foreign(matUtriCholFactor, c, matUtriCholFactorI(-pointer(float_array), -integer
 %% --Description--
 %%              Predict y_i for each data point in the given data matrix using the currently-trained LARS model.
 %%
-predict(PointsList, PointsRows, PredicList, RowMajor) :-
+lars_predict(PointsList, PointsRows, PredicList, RowMajor) :-
         convert_list_to_float_array(PointsList, PointsRows, array(Xsize, Xrownum, X)),
         predictI(X, Xsize, Xrownum, Y, Ysize, RowMajor),
         convert_float_array_to_list(Y, Ysize, PredicList).
@@ -253,7 +253,7 @@ foreign(predict, c, predictI(   +pointer(float_array), +integer, +integer,
 %% --Description--
 %%              Train the LARS model with the given data.
 %%
-train(DataList, DataRows, ResponsesList, BetaList, RowMajor, Error) :-
+lars_train(DataList, DataRows, ResponsesList, BetaList, RowMajor, Error) :-
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)),
         convert_list_to_float_array(ResponsesList, array(Ysize, Y)),
         trainI(X, Xsize, Xrownum, Y, Ysize, Z, Zsize, RowMajor, Error),
diff --git a/src/methods/lars/lars_test.pl b/src/methods/lars/lars_test.pl
index 23e55df2df19392e7f53f3e6c678e8f805a06e0c..67e2cb15dc3c71fd90e1e22b6522d674e6b9c052 100644
--- a/src/methods/lars/lars_test.pl
+++ b/src/methods/lars/lars_test.pl
@@ -9,10 +9,10 @@
 :- use_module('../../helper_files/helper.pl').
 
 reset_Model :-
-        initModelNoDataNoGram(1, 0.1, 0.3, 0.001),
+        lars_initModelNoDataNoGram(1, 0.1, 0.3, 0.001),
         convert_list_to_float_array([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5],3, array(Xsize, Xrownum, X)),
         convert_list_to_float_array([0.2,0.2,0.2,0.2], array(Ysize, Y)),
-        trainNoBetaReturn(X,Xsize, Xrownum,Y, Ysize, 1, _).
+        lars_trainNoBetaReturn(X,Xsize, Xrownum,Y, Ysize, 1, _).
 
 :- begin_tests(lists).
 
@@ -22,27 +22,27 @@ test(train, [true(A =:= 0)]) :-
         reset_Model,
         convert_list_to_float_array([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5],3, array(Xsize, Xrownum, X)),
         convert_list_to_float_array([0.2,0.2,0.2,0.2], array(Ysize, Y)),
-        trainNoBetaReturn(X,Xsize, Xrownum,Y, Ysize, 1, A),
-        predict(X, Xsize,Xrownum, Predic, PredicSize,0),
+        lars_trainNoBetaReturn(X,Xsize, Xrownum,Y, Ysize, 1, A),
+        lars_predict(X, Xsize,Xrownum, Predic, PredicSize,0),
         convert_float_array_to_list(Predic, PredicSize, Result),
         print(Result).
 
 test(activeSet) :-
         reset_Model,
-        activeSet(ActSet, ActSetSize),
+        lars_activeSet(ActSet, ActSetSize),
         convert_float_array_to_list(ActSet, ActSetSize, Result),
         print(Result).
 
 test(matUtriCholFactor) :-
         reset_Model,
-        matUtriCholFactor(Matrix, MatrixColNum, MatrixRowNum),
+        lars_matUtriCholFactor(Matrix, MatrixColNum, MatrixRowNum),
         print(MatrixColNum),
         convert_float_array_to_2d_list(Matrix, MatrixColNum, MatrixRowNum, Results),
         print(Results).
 
 test(betaPath) :-
         reset_Model,
-        betaPath(Matrix, MatrixColNum, MatrixRowNum),
+        lars_betaPath(Matrix, MatrixColNum, MatrixRowNum),
         print(MatrixColNum),
         convert_float_array_to_2d_list(Matrix, MatrixColNum, MatrixRowNum, Results),
         print(Results).
diff --git a/src/methods/linear_SVM/linear_SVM.pl b/src/methods/linear_SVM/linear_SVM.pl
index 996531079d10358bd08c1623dd55d7c3d1d0b2cc..6959fa49beb4bda3e8c4af977e9c4fb49ff4c5ad 100644
--- a/src/methods/linear_SVM/linear_SVM.pl
+++ b/src/methods/linear_SVM/linear_SVM.pl
@@ -1,10 +1,10 @@
 
-:- module(linear_SVM, [ initModelWithTrain/8,
-                        initModelNoTrain/4,
-                        classify/5,
-                        classifyPoint/2,
-                        computeAccuracy/4,
-                        train/6]).
+:- module(linear_SVM, [ linear_SVM_initModelWithTrain/8,
+                        linear_SVM_initModelNoTrain/4,
+                        linear_SVM_classify/5,
+                        linear_SVM_classifyPoint/2,
+                        linear_SVM_computeAccuracy/4,
+                        linear_SVM_train/6]).
 
 %% requirements of library(struct)
 :- load_files(library(str_decl),
@@ -36,7 +36,7 @@
 %% --Description--
 %%              Initializes the linear_svm model with the given data and trains it.
 %%
-initModelWithTrain(DataList, DataRows, LabelsList, NumClasses, Lambda, Delta, FitIntercept, Optimizer) :-
+linear_SVM_initModelWithTrain(DataList, DataRows, LabelsList, NumClasses, Lambda, Delta, FitIntercept, Optimizer) :-
         NumClasses >= 0,
         Lambda >= 0.0,
         Delta >= 0.0,
@@ -62,7 +62,7 @@ foreign(initModelWithTrain, c, initModelWithTrainI(     +pointer(float_array), +
 %% --Description--
 %%              Initializes the linear_svm model with the given data but doesnt train it.
 %%
-initModelNoTrain(NumClasses, Lambda, Delta, FitIntercept) :-
+linear_SVM_initModelNoTrain(NumClasses, Lambda, Delta, FitIntercept) :-
         NumClasses >= 0,
         Lambda >= 0.0,
         Delta >= 0.0,
@@ -82,7 +82,7 @@ foreign(initModelNoTrain, c, initModelNoTrainI( +integer, +float32, +float32,
 %% --Description--
 %%              Classify the given points, returning class scores and predicted class label for each point.
 %%
-classify(DataList, DataRows, LabelsList, ScoresList, ZCols) :-
+linear_SVM_classify(DataList, DataRows, LabelsList, ScoresList, ZCols) :-
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrows, X)),
         classifyI(X, Xsize, Xrows, Y, Ysize, Z, ZCols, ZRows),
         convert_float_array_to_list(Y, Ysize, LabelsList),
@@ -102,7 +102,7 @@ foreign(classify, c, classifyI( +pointer(float_array), +integer, +integer,
 %% --Description--
 %%              Classify the given point.
 %%
-classifyPoint(DataList, Prediction) :-
+linear_SVM_classifyPoint(DataList, Prediction) :-
         convert_list_to_float_array(DataList, array(Xsize, X)),
         classifyPointI(X, Xsize, Prediction).
 
@@ -121,7 +121,7 @@ foreign(classifyPoint, c, classifyPointI(       +pointer(float_array), +integer,
 %%              Computes accuracy of the learned model given the feature data and the labels associated with each data point.
 %%              Predictions are made using the provided data and are compared with the actual labels.
 %%
-computeAccuracy(DataList, DataRows, LabelsList, Accuracy) :-
+linear_SVM_computeAccuracy(DataList, DataRows, LabelsList, Accuracy) :-
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)),
         convert_list_to_float_array(LabelsList, array(Ysize, Y)),
         computeAccuracyI(X, Xsize, Xrownum, Y, Ysize, Accuracy).
@@ -143,7 +143,7 @@ foreign(computeAccuracy, c, computeAccuracyI(   +pointer(float_array), +integer,
 %% --Description--
 %%              Train the Linear_svm model with the given training data.
 %%
-train(DataList, DataRows, LabelsList, NumClasses, Optimizer, ObjValue) :-
+linear_SVM_train(DataList, DataRows, LabelsList, NumClasses, Optimizer, ObjValue) :-
         NumClasses >= 0,
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)),
         convert_list_to_float_array(LabelsList, array(Ysize, Y)),
diff --git a/src/methods/linear_SVM/linear_SVM_test.pl b/src/methods/linear_SVM/linear_SVM_test.pl
index 9ddaa7301faeb1f4e184db6c66aee5a434badb79..3b973e57ff6987196546f9ce6ae1196d21389ea6 100644
--- a/src/methods/linear_SVM/linear_SVM_test.pl
+++ b/src/methods/linear_SVM/linear_SVM_test.pl
@@ -7,272 +7,272 @@
 :- use_module('../../helper_files/helper.pl').
 
 reset_Model_NoTrain :-
-        initModelNoTrain(2, 0.0001, 1.0, 0).
+        linear_SVM_initModelNoTrain(2, 0.0001, 1.0, 0).
 
 reset_Model_WithTrain :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0.0001, 1.0, 0, lbfgs).
+        linear_SVM_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0.0001, 1.0, 0, lbfgs).
 
 :- begin_tests(lists).
 
 
 %%
-%% TESTING predicate initModelWithTrain/8
+%% TESTING predicate linear_SVM_initModelWithTrain/8
 %%
-:- begin_tests(initModelWithTrain).      
+:- begin_tests(linear_SVM_initModelWithTrain).      
 
 %% Failure Tests
                                             
 test(linear_SVM_InitModelWithTrain_Negative_NumClasses, fail) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], -2, 0.0001, 1.0, 0, lbfgs).
+        linear_SVM_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], -2, 0.0001, 1.0, 0, lbfgs).
 
 test(linear_SVM_InitModelWithTrain_Negative_Lambda, fail) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, -0.0001, 1.0, 0, lbfgs).
+        linear_SVM_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, -0.0001, 1.0, 0, lbfgs).
 
 test(linear_SVM_InitModelWithTrain_Negative_Delta, fail) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0.0001, -1.0, 0, lbfgs).
+        linear_SVM_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0.0001, -1.0, 0, lbfgs).
 
 test(linear_SVM_InitModelWithTrain_Wrong_Optimizer_Input, [error(domain_error('The given Optimizer is unkown!' , wrongInput), _)]) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0.0001, 1.0, 0, wrongInput).
+        linear_SVM_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0.0001, 1.0, 0, wrongInput).
 
 test(linear_SVM_InitModelWithTrain_Too_Few_Labels, [error(_,system_error('Error'))]) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], 2, 0.0001, 1.0, 0, lbfgs).
+        linear_SVM_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], 2, 0.0001, 1.0, 0, lbfgs).
 
 test(linear_SVM_InitModelWithTrain_Too_Many_Labels, [error(_,system_error('Error'))]) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1,0,1], 2, 0.0001, 1.0, 0, lbfgs).
+        linear_SVM_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1,0,1], 2, 0.0001, 1.0, 0, lbfgs).
 
 test(linear_SVM_InitModelWithTrain_Too_Many_LabelClasses, [error(_,system_error('Error'))]) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,2,3], 2, 0.0001, 1.0, 0, lbfgs).
+        linear_SVM_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,2,3], 2, 0.0001, 1.0, 0, lbfgs).
         
 
 %% Successful Tests
 
 test(linear_SVM_InitModelWithTrain_Normal_Use_LBFGS) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0.0001, 1.0, 0, lbfgs).
+        linear_SVM_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0.0001, 1.0, 0, lbfgs).
 
 test(linear_SVM_InitModelWithTrain_Normal_Use_PSGD) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0.0001, 1.0, 0, psgd).
+        linear_SVM_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0.0001, 1.0, 0, psgd).
 
 test(linear_SVM_InitModelWithTrain_CSV_Inpupt_LBFGS) :-
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        initModelWithTrain(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0.000021, 2.0, 1, lbfgs).
+        linear_SVM_initModelWithTrain(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0.000021, 2.0, 1, lbfgs).
 
-:- end_tests(initModelWithTrain).
+:- end_tests(linear_SVM_initModelWithTrain).
 
 
 
 %%
-%% TESTING predicate initModelNoTrain/4
+%% TESTING predicate linear_SVM_initModelNoTrain/4
 %%
-:- begin_tests(initModelNoTrain).      
+:- begin_tests(linear_SVM_initModelNoTrain).      
 
 %% Failure Tests
                                             
 test(linear_SVM_InitModelNoTrain_Negative_NumClasses, fail) :-
-        initModelNoTrain(-2, 0.0001, 1.0, 0).
+        linear_SVM_initModelNoTrain(-2, 0.0001, 1.0, 0).
 
 test(linear_SVM_InitModelNoTrain_Negative_Lambda, fail) :-
-        initModelNoTrain(2, -0.0001, 1.0, 0).
+        linear_SVM_initModelNoTrain(2, -0.0001, 1.0, 0).
 
 test(linear_SVM_InitModelNoTrain_Negative_Delta, fail) :-
-        initModelNoTrain(2, 0.0001, -1.0, 0).
+        linear_SVM_initModelNoTrain(2, 0.0001, -1.0, 0).
         
 
 %% Successful Tests
 
 test(linear_SVM_InitModelNoTrain_Normal_Use) :-
-        initModelNoTrain(2, 0.0001, 1.0, 0).
+        linear_SVM_initModelNoTrain(2, 0.0001, 1.0, 0).
 
 test(linear_SVM_InitModelNoTrain_Alternative) :-
-        initModelNoTrain(2, 0.042, 0.5, 1).
+        linear_SVM_initModelNoTrain(2, 0.042, 0.5, 1).
 
-:- end_tests(initModelNoTrain).
+:- end_tests(linear_SVM_initModelNoTrain).
 
 
 
 %%
-%% TESTING predicate classify/5
+%% TESTING predicate linear_SVM_classify/5
 %%
-:- begin_tests(classify).      
+:- begin_tests(linear_SVM_classify).      
 
 %% Failure Tests
                                             
 test(linear_SVM_Classify_Before_Train, [error(_,system_error('Error'))]) :-
         reset_Model_NoTrain,
-        classify([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, _, _, _).
+        linear_SVM_classify([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, _, _, _).
 
 test(linear_SVM_Classify_Diffrent_Dims_Than_Train, [error(_,system_error('Error'))]) :-
         reset_Model_WithTrain,
-        classify([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, _, _, _).
+        linear_SVM_classify([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, _, _, _).
         
 
 %% Successful Tests
 
 test(linear_SVM_Classify_Normal_Use_LBFGS) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0.0001, 1.0, 0, lbfgs),
-        classify([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, LabelsList, ScoresList, _),
+        linear_SVM_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0.0001, 1.0, 0, lbfgs),
+        linear_SVM_classify([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, LabelsList, ScoresList, _),
         print('\nLabels: '),
         print(LabelsList),
         print('\nScores: '),
         print(ScoresList).
 
 test(linear_SVM_Classify_Normal_Use_PSGD) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0.0001, 1.0, 0, psgd),
-        classify([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, LabelsList, ScoresList, _),
+        linear_SVM_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0.0001, 1.0, 0, psgd),
+        linear_SVM_classify([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, LabelsList, ScoresList, _),
         print('\nLabels: '),
         print(LabelsList),
         print('\nScores: '),
         print(ScoresList).
 
 test(linear_SVM_Classify_CSV_Input) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, [0,1,0], 2, 0.0001, 2.0, 1, lbfgs),
+        linear_SVM_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, [0,1,0], 2, 0.0001, 2.0, 1, lbfgs),
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        classify(Data, 4, LabelsList, ScoresList, _),
+        linear_SVM_classify(Data, 4, LabelsList, ScoresList, _),
         print('\nLabels: '),
         print(LabelsList),
         print('\nScores: '),
         print(ScoresList).
 
-:- end_tests(classify).
+:- end_tests(linear_SVM_classify).
 
 
 
 %%
-%% TESTING predicate classifyPoint/2
+%% TESTING predicate linear_SVM_classifyPoint/2
 %%
-:- begin_tests(classifyPoint).      
+:- begin_tests(linear_SVM_classifyPoint).      
 
 %% Failure Tests
                                             
 test(linear_SVM_ClassifyPoint_Before_Train, [error(_,system_error('Error'))]) :-
         reset_Model_NoTrain,
-        classifyPoint([5.1,3.5,1.4], _).
+        linear_SVM_classifyPoint([5.1,3.5,1.4], _).
 
 test(linear_SVM_ClassifyPoint_Diffrent_Dims_Than_Train, [error(_,system_error('Error'))]) :-
         reset_Model_WithTrain,
-        classifyPoint([5.1,3.5,1.4,4.9], _).
+        linear_SVM_classifyPoint([5.1,3.5,1.4,4.9], _).
         
 
 %% Successful Tests
 
 test(linear_SVM_ClassifyPoint_Normal_Use_LBFGS) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0.0001, 1.0, 0, lbfgs),
-        classifyPoint([5.1,3.5,1.4], Label),
+        linear_SVM_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0.0001, 1.0, 0, lbfgs),
+        linear_SVM_classifyPoint([5.1,3.5,1.4], Label),
         print('\nLabel: '),
         print(Label).
 
 test(linear_SVM_ClassifyPoint_Normal_Use_PSGD) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0.0001, 1.0, 0, psgd),
-        classifyPoint([5.1,3.5,1.4], Label),
+        linear_SVM_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0.0001, 1.0, 0, psgd),
+        linear_SVM_classifyPoint([5.1,3.5,1.4], Label),
         print('\nLabel: '),
         print(Label).
 
 test(linear_SVM_ClassifyPoint_CSV_Input) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, [0,1,0], 2, 0.0001, 2.0, 1, lbfgs),
+        linear_SVM_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, [0,1,0], 2, 0.0001, 2.0, 1, lbfgs),
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,1, Data),
-        classifyPoint(Data, Label),
+        linear_SVM_classifyPoint(Data, Label),
         print('\nLabel: '),
         print(Label).
 
-:- end_tests(classifyPoint).
+:- end_tests(linear_SVM_classifyPoint).
 
 
 
 %%
-%% TESTING predicate computeAccuracy/4
+%% TESTING predicate linear_SVM_computeAccuracy/4
 %%
-:- begin_tests(computeAccuracy).      
+:- begin_tests(linear_SVM_computeAccuracy).      
 
 %% Failure Tests
 
 test(linear_SVM_ComputeAccuracy_Before_Train, [error(_,system_error('Error'))]) :-
         reset_Model_NoTrain,
-        computeAccuracy([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], _).
+        linear_SVM_computeAccuracy([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], _).
 
 test(linear_SVM_ComputeAccuracy_Too_Few_Labels, [error(_,system_error('Error'))]) :-
         reset_Model_WithTrain,
-        computeAccuracy([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], _).
+        linear_SVM_computeAccuracy([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], _).
 
 test(linear_SVM_ComputeAccuracy_Too_Many_Labels, [error(_,system_error('Error'))]) :-
         reset_Model_WithTrain,
-        computeAccuracy([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1,0,1], _).
+        linear_SVM_computeAccuracy([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1,0,1], _).
 
 test(linear_SVM_ComputeAccuracy_Too_Many_LabelClasses, [error(_,system_error('Error'))]) :-
         reset_Model_WithTrain,
-        computeAccuracy([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,2,3], _).
+        linear_SVM_computeAccuracy([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,2,3], _).
 
 test(linear_SVM_ComputeAccuracy_Wrong_Data_Dims, [error(_,system_error('Error'))]) :-
         reset_Model_WithTrain,
-        computeAccuracy([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, [0,1,0], _).
+        linear_SVM_computeAccuracy([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, [0,1,0], _).
         
 
 %% Successful Tests
 
 test(linear_SVM_ComputeAccuracy_Normal_Use_LBFGS) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0.0001, 1.0, 0, lbfgs),
-        computeAccuracy([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], Accuracy),
+        linear_SVM_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0.0001, 1.0, 0, lbfgs),
+        linear_SVM_computeAccuracy([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], Accuracy),
         print('\nAccuracy: '),
         print(Accuracy).
 
 test(linear_SVM_ComputeAccuracy_Normal_Use_PSGD) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0.0001, 1.0, 0, psgd),
-        computeAccuracy([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], Accuracy),
+        linear_SVM_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0.0001, 1.0, 0, psgd),
+        linear_SVM_computeAccuracy([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], Accuracy),
         print('\nAccuracy: '),
         print(Accuracy).
 
 test(linear_SVM_ComputeAccuracy_CSV_Input) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, [0,1,0], 2, 0.0001, 2.0, 1, lbfgs),
+        linear_SVM_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, [0,1,0], 2, 0.0001, 2.0, 1, lbfgs),
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,1, Data),
-        computeAccuracy(Data, 4, [0,1,0,1,1,0,1,1,1,0], Accuracy),
+        linear_SVM_computeAccuracy(Data, 4, [0,1,0,1,1,0,1,1,1,0], Accuracy),
         print('\nAccuracy: '),
         print(Accuracy).
 
-:- end_tests(computeAccuracy).
+:- end_tests(linear_SVM_computeAccuracy).
 
 
 
 %%
-%% TESTING predicate train/6
+%% TESTING predicate linear_SVM_train/6
 %%
-:- begin_tests(train).      
+:- begin_tests(linear_SVM_train).      
 
 %% Failure Tests
                                             
 test(linear_SVM_Train_Negative_NumClasses, fail) :-
         reset_Model_NoTrain,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], -2, lbfgs, _).
+        linear_SVM_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], -2, lbfgs, _).
 
 test(linear_SVM_Train_Wrong_Optimizer_Input, [error(domain_error('The given Optimizer is unkown!' , wrongInput), _)]) :-
         reset_Model_NoTrain,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, wrongInput, _).
+        linear_SVM_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, wrongInput, _).
 
 test(linear_SVM_Train_Too_Few_Labels, [error(_,system_error('Error'))]) :-
         reset_Model_NoTrain,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], 2, lbfgs, _).
+        linear_SVM_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], 2, lbfgs, _).
 
 test(linear_SVM_Train_Too_Many_Labels, [error(_,system_error('Error'))]) :-
         reset_Model_NoTrain,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1,0,1], 2, lbfgs, _).
+        linear_SVM_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1,0,1], 2, lbfgs, _).
 
 test(linear_SVM_Train_Too_Many_LabelClasses, [error(_,system_error('Error'))]) :-
         reset_Model_NoTrain,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,2,3], 2, lbfgs, _).
+        linear_SVM_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,2,3], 2, lbfgs, _).
         
 
 %% Successful Tests
 
 test(linear_SVM_Train_Normal_Use_LBFGS) :-
         reset_Model_NoTrain,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, lbfgs, ObjectiveValue),
+        linear_SVM_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, lbfgs, ObjectiveValue),
         print('\nObjectiveValue: '),
         print(ObjectiveValue).
 
 test(linear_SVM_Train_Normal_Use_PSGD) :-
         reset_Model_NoTrain,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, psgd, ObjectiveValue),
+        linear_SVM_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, psgd, ObjectiveValue),
         print('\nObjectiveValue: '),
         print(ObjectiveValue).
 
@@ -280,11 +280,11 @@ test(linear_SVM_Train_CSV_Inpupt_LBFGS) :-
         reset_Model_NoTrain,
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        train(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, lbfgs, ObjectiveValue),
+        linear_SVM_train(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, lbfgs, ObjectiveValue),
         print('\nObjectiveValue: '),
         print(ObjectiveValue).
 
-:- end_tests(train).
+:- end_tests(linear_SVM_train).
 
 run_linear_SVM_tests :-
         run_tests.
diff --git a/src/methods/linear_regression/linear_regression.pl b/src/methods/linear_regression/linear_regression.pl
index cb85d07b3256b1e9e766e7391f749762f0c4dafb..dfc1ff849c7841cb6b5f945ceab7c2e8424cca9b 100644
--- a/src/methods/linear_regression/linear_regression.pl
+++ b/src/methods/linear_regression/linear_regression.pl
@@ -1,12 +1,12 @@
 
-:- module(linear_regression, [  initModel/5, 
-                                initModelWithWeights/6, 
-                                computeError/4, 
-                                parameters/1, 
-                                modifyParameters/1, 
-                                predict/3, 
-                                train/5, 
-                                trainWithWeights/6]).
+:- module(linear_regression, [  linear_regression_initModel/5, 
+                                linear_regression_initModelWithWeights/6, 
+                                linear_regression_computeError/4, 
+                                linear_regression_parameters/1, 
+                                linear_regression_modifyParameters/1, 
+                                linear_regression_predict/3, 
+                                linear_regression_train/5, 
+                                linear_regression_trainWithWeights/6]).
 
 :- load_files(library(str_decl),
                 [when(compile_time), if(changed)]).
@@ -34,7 +34,7 @@
 %% --Description--
 %%              Initializes the linear_regression model and trains it but doesnt include weights.
 %%
-initModel(DataList, DataRows, ResponsesList, Lambda, Intercept) :-
+linear_regression_initModel(DataList, DataRows, ResponsesList, Lambda, Intercept) :-
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)),
         convert_list_to_float_array(ResponsesList, array(Ysize, Y)),
         initModelI(X, Xsize, Xrownum, Y, Ysize, Lambda, Intercept).
@@ -57,7 +57,7 @@ foreign(initModel,  c, initModelI(      +pointer(float_array), +integer, +intege
 %% --Description--
 %%              Initializes the linear_regression model, trains it and adds weights to it.
 %%
-initModelWithWeights(DataList, DataRows, ResponsesList, WeightsList, Lambda, Intercept) :-
+linear_regression_initModelWithWeights(DataList, DataRows, ResponsesList, WeightsList, Lambda, Intercept) :-
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)),
         convert_list_to_float_array(ResponsesList, array(Ysize, Y)),
         convert_list_to_float_array(WeightsList, array(Zsize, Z)),
@@ -80,7 +80,7 @@ foreign(initModelWithWeights,  c, initModelWithWeightsI(        +pointer(float_a
 %% --Description--
 %%              Calculate the L2 squared error on the given predictors and responses using this linear regression model.
 %%
-computeError(DataList, DataRows, ResponsesList, Error) :-
+linear_regression_computeError(DataList, DataRows, ResponsesList, Error) :-
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)),
         convert_list_to_float_array(ResponsesList, array(Ysize, Y)),
         computeErrorI(X, Xsize, Xrownum, Y, Ysize, Error).
@@ -98,7 +98,7 @@ foreign(computeError,  c, computeErrorI(+pointer(float_array), +integer, +intege
 %% --Description--
 %%              Get the parameters (the b vector).
 %%
-parameters(ParametersList) :-
+linear_regression_parameters(ParametersList) :-
         parametersI(Y, Ysize),
         convert_float_array_to_list(Y, Ysize, ParametersList).
 
@@ -113,7 +113,7 @@ foreign(parameters, c, parametersI(-pointer(float_array), -integer)).
 %% --Description--
 %%              Modify the parameters (the b vector).
 %%
-modifyParameters(ParameterList) :-
+linear_regression_modifyParameters(ParameterList) :-
         convert_list_to_float_array(ParameterList, array(Ysize, Y)),
         modifyParametersI(Y, Ysize).
 
@@ -129,7 +129,7 @@ foreign(modifyParameters,  c, modifyParametersI(+pointer(float_array), +integer)
 %% --Description--
 %%              Calculate y_i for each data point in points.
 %%
-predict(PointsList, PointsRows, PredicList) :-
+linear_regression_predict(PointsList, PointsRows, PredicList) :-
         convert_list_to_float_array(PointsList, PointsRows, array(Xsize, Xrownum, X)),
         predictI(X, Xsize, Xrownum, Y, Ysize),
         convert_float_array_to_list(Y, Ysize, PredicList).
@@ -151,7 +151,7 @@ foreign(predict,  c, predictI(  +pointer(float_array), +integer, +integer,
 %%              Careful! 
 %%              This will completely ignore and overwrite the existing model. This particular implementation does not have an incremental training algorithm.
 %%
-train(DataList, DataRows, ResponsesList, Intercept, Error) :-
+linear_regression_train(DataList, DataRows, ResponsesList, Intercept, Error) :-
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)),
         convert_list_to_float_array(ResponsesList, array(Ysize, Y)),
         trainI(X, Xsize, Xrownum, Y, Ysize, Intercept, Error).
@@ -176,7 +176,7 @@ foreign(train,  c, trainI(      +pointer(float_array), +integer, +integer,
 %%              Careful! 
 %%              This will completely ignore and overwrite the existing model. This particular implementation does not have an incremental training algorithm.
 %%
-trainWithWeights(DataList, DataRows, ResponsesList, WeightsList, Intercept, Error) :-
+linear_regression_trainWithWeights(DataList, DataRows, ResponsesList, WeightsList, Intercept, Error) :-
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)),
         convert_list_to_float_array(ResponsesList, array(Ysize, Y)),
         convert_list_to_float_array(WeightsList, array(Zsize, Z)),
diff --git a/src/methods/local_coordinate_coding/local_coordinate_coding.pl b/src/methods/local_coordinate_coding/local_coordinate_coding.pl
index 9c7d2be226d054bb10bc91d2e901df04ba253f06..97c45803f284144917941ba1d9aae9def3171a05 100644
--- a/src/methods/local_coordinate_coding/local_coordinate_coding.pl
+++ b/src/methods/local_coordinate_coding/local_coordinate_coding.pl
@@ -1,10 +1,10 @@
 
-:- module(local_coordinate_coding, [    initModelWithTrain/7,
-                                        initModelNoTrain/5,
-                                        encode/4,
-                                        objective/5,
-                                        optimizeDictionary/5,
-                                        train/3]).
+:- module(local_coordinate_coding, [    lcc_initModelWithTrain/7,
+                                        lcc_initModelNoTrain/5,
+                                        lcc_encode/4,
+                                        lcc_objective/5,
+                                        lcc_optimizeDictionary/5,
+                                        lcc_train/3]).
 
 %% requirements of library(struct)
 :- load_files(library(str_decl),
@@ -35,7 +35,7 @@
 %% --Description--
 %%              Initializes the model and trains it so encode/6 can be called after
 %%
-initModelWithTrain(DataList, DataRows, Normalize, Atoms, Lambda, MaxIterations, Tolerance) :-
+lcc_initModelWithTrain(DataList, DataRows, Normalize, Atoms, Lambda, MaxIterations, Tolerance) :-
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)),
         initModelWithTrainI(X, Xsize, Xrownum, Normalize, Atoms, Lambda, MaxIterations, Tolerance).
 
@@ -56,7 +56,7 @@ foreign(initModelWithTrain, c, initModelWithTrainI(     +pointer(float_array), +
 %% --Description--
 %%              Initializes the model but doesnt train it so train/4 has to be called befor encode/6 can be used.
 %%
-initModelNoTrain(Normalize, Atoms, Lambda, MaxIterations, Tolerance) :-
+lcc_initModelNoTrain(Normalize, Atoms, Lambda, MaxIterations, Tolerance) :-
         initModelNoTrainI(Normalize, Atoms, Lambda, MaxIterations, Tolerance).
 
 foreign(initModelNoTrain, c, initModelNoTrainI( +integer, 
@@ -72,7 +72,7 @@ foreign(initModelNoTrain, c, initModelNoTrainI( +integer,
 %% --Description--
 %%              Code each point via distance-weighted LARS.
 %%
-encode(DataList, DataRows, CodesList, YCols) :-
+lcc_encode(DataList, DataRows, CodesList, YCols) :-
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrows, X)),
         encodeI(X, Xsize, Xrows, Y, YCols, YRows),
         convert_float_array_to_2d_list(Y, YCols, YRows, CodesList).
@@ -91,7 +91,7 @@ foreign(encode, c, encodeI(     +pointer(float_array), +integer, +integer,
 %% --Description--
 %%              Compute objective function given the list of adjacencies.
 %%
-objective(DataList, DataRows, CodesList, ZCols, AdjacenciesList) :-
+lcc_objective(DataList, DataRows, CodesList, YCols, AdjacenciesList) :-
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrows, X)),
         objectiveI(X, Xsize, Xrows, Y, YCols, YRows, Z, Zsize),
         convert_float_array_to_2d_list(Y, YCols, YRows, CodesList),
@@ -112,7 +112,7 @@ foreign(objective, c, objectiveI(       +pointer(float_array), +integer, +intege
 %% --Description--
 %%              Learn dictionary by solving linear system.
 %%
-optimizeDictionary(DataList, DataRows, CodesList, ZCols, AdjacenciesList) :-
+lcc_optimizeDictionary(DataList, DataRows, CodesList, YCols, AdjacenciesList) :-
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrows, X)),
         optimizeDictionaryI(X, Xsize, Xrows, Y, YCols, YRows, Z, Zsize),
         convert_float_array_to_2d_list(Y, YCols, YRows, CodesList),
@@ -132,7 +132,7 @@ foreign(optimizeDictionary, c, optimizeDictionaryI(     +pointer(float_array), +
 %% --Description--
 %%              Run local coordinate coding and train the model.
 %%
-train(DataList, DataRows, ReturnValue) :-
+lcc_train(DataList, DataRows, ReturnValue) :-
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrows, X)),
         trainI(X, Xsize, Xrows, ReturnValue).
 
diff --git a/src/methods/local_coordinate_coding/local_coordinate_coding_test.pl b/src/methods/local_coordinate_coding/local_coordinate_coding_test.pl
index 5fe6b237aed5dd8d06ff3ad85db406abf8a99f84..0bdb5de3ca20b01aea1f96541b7685700f72fc57 100644
--- a/src/methods/local_coordinate_coding/local_coordinate_coding_test.pl
+++ b/src/methods/local_coordinate_coding/local_coordinate_coding_test.pl
@@ -7,7 +7,7 @@
 :- use_module('../../helper_files/helper.pl').
 
 reset_Model :-
-        initModel(1,0,50,0.0001).
+        lcc_initModelNoTrain(1, 0, 0.0, 50, 0.0001).
 
 %%
 %% TESTING predicate predicate/10
diff --git a/src/methods/logistic_regression/logistic_regression.pl b/src/methods/logistic_regression/logistic_regression.pl
index 86228c50007a5c6c0a67326dfcbf866c7fa30bfd..ecdc87b513dc2ae77c1fd008a21ab5993feb9c45 100644
--- a/src/methods/logistic_regression/logistic_regression.pl
+++ b/src/methods/logistic_regression/logistic_regression.pl
@@ -1,11 +1,11 @@
 
-:- module(logistic_regression, [        initModelNoOptimizer/4,
-                                        initModelWithOptimizer/5,
-                                        classifyPoint/3,
-                                        classifyMatrix/6,
-                                        computeAccuracy/5,
-                                        computeError/4,
-                                        train/4]).
+:- module(logistic_regression, [        logistic_regression_initModelNoOptimizer/4,
+                                        logistic_regression_initModelWithOptimizer/5,
+                                        logistic_regression_classifyPoint/3,
+                                        logistic_regression_classifyMatrix/6,
+                                        logistic_regression_computeAccuracy/5,
+                                        logistic_regression_computeError/4,
+                                        logistic_regression_train/4]).
 
 %% requirements of library(struct)
 :- load_files(library(str_decl),
@@ -33,7 +33,7 @@
 %% --Description--
 %%              Initialize the logistic_regression model without specifing a optimizer.
 %%
-initModelNoOptimizer(DataList, DataRows, ResponsesList, Lambda) :-
+logistic_regression_initModelNoOptimizer(DataList, DataRows, ResponsesList, Lambda) :-
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)),
         convert_list_to_float_array(ResponsesList, array(Ysize, Y)),
         initModelNoOptimizerI(X, Xsize, Xrownum, Y, Ysize, Lambda).
@@ -54,7 +54,7 @@ foreign(initModelNoOptimizer, c, initModelNoOptimizerI( +pointer(float_array), +
 %% --Description--
 %%              Initialize the logistic_regression model and specify the optimizer.
 %%
-initModelWithOptimizer(DataList, DataRows, ResponsesList, Responses, Lambda) :-
+logistic_regression_initModelWithOptimizer(DataList, DataRows, ResponsesList, Responses, Lambda) :-
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)),
         convert_list_to_float_array(ResponsesList, array(Ysize, Y)),
         initModelWithOptimizerI(X, Xsize, Xrownum, Y, Ysize, Responses, Lambda).
@@ -75,7 +75,7 @@ foreign(initModelWithOptimizer, c, initModelWithOptimizerI(     +pointer(float_a
 %% --Description--
 %%              Classify the given point.
 %%
-classifyPoint(PointList, DecisionBoundary, PredicLabel) :-
+logistic_regression_classifyPoint(PointList, DecisionBoundary, PredicLabel) :-
         convert_list_to_float_array(PointList, array(Xsize, X)),
         classifyPointI(X, Xsize, DecisionBoundary, PredicLabel).
 
@@ -95,7 +95,7 @@ foreign(classifyPoint, c, classifyPointI(       +pointer(float_array), +integer,
 %% --Description--
 %%              Classify the given points, returning the predicted labels for each point.
 %%
-classifyMatrix(DataList, DataRows, PredictionList, ProbsList, ZCols, DecisionBoundary) :-
+logistic_regression_classifyMatrix(DataList, DataRows, PredictionList, ProbsList, ZCols, DecisionBoundary) :-
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrows, X)),
         classifyMatrixI(X, Xsize, Xrows, Y, Ysize, Z, ZCols, ZRows, DecisionBoundary),
         convert_float_array_to_list(Y, Ysize, PredictionList),
@@ -118,7 +118,7 @@ foreign(classifyMatrix, c, classifyMatrixI(     +pointer(float_array), +integer,
 %% --Description--
 %%              Compute the accuracy of the model on the given predictors and responses, using the given decision boundary.
 %%
-computeAccuracy(DataList, DataRows, ResponsesList, DecisionBoundary, Accuracy) :-
+logistic_regression_computeAccuracy(DataList, DataRows, ResponsesList, DecisionBoundary, Accuracy) :-
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)),
         convert_list_to_float_array(ResponsesList, array(Ysize, Y)),
         computeAccuracyI(X, Xsize, Xrownum, Y, Ysize, DecisionBoundary, Accuracy).
@@ -139,7 +139,7 @@ foreign(computeAccuracy, c, computeAccuracyI(   +pointer(float_array), +integer,
 %% --Description--
 %%              Compute the error of the model.
 %%
-computeError(DataList, DataRows, ResponsesList, Error) :-
+logistic_regression_computeError(DataList, DataRows, ResponsesList, Error) :-
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)),
         convert_list_to_float_array(ResponsesList, array(Ysize, Y)),
         computeErrorI(X, Xsize, Xrownum, Y, Ysize, Error).
@@ -159,7 +159,7 @@ foreign(computeError, c, computeErrorI( +pointer(float_array), +integer, +intege
 %% --Description--
 %%              Train the logistic_regression model on the given input data.
 %%
-train(DataList, DataRows, ResponsesList, Optimizer) :-
+logistic_regression_train(DataList, DataRows, ResponsesList, Optimizer) :-
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)),
         convert_list_to_float_array(ResponsesList, array(Ysize, Y)),
         trainI(X, Xsize, Xrownum, Y, Ysize, Optimizer).
diff --git a/src/methods/logistic_regression/logistic_regression_test.pl b/src/methods/logistic_regression/logistic_regression_test.pl
index da8651e190a62ec57928d02c557e53dc7ad83518..f7d7fac2b850cab006a5a71b703ba2fb8ff46e7e 100644
--- a/src/methods/logistic_regression/logistic_regression_test.pl
+++ b/src/methods/logistic_regression/logistic_regression_test.pl
@@ -7,7 +7,7 @@
 :- use_module('../../helper_files/helper.pl').
 
 reset_Model :-
-        initModel(1,0,50,0.0001).
+        logistic_regression_initModelNoOptimizer([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 0.0001).
 
 %%
 %% TESTING predicate predicate/10
diff --git a/src/methods/lsh/lsh.pl b/src/methods/lsh/lsh.pl
index 0f6f40da06d02a73c6773efd8ef2a1f1b94a9954..d0df6d303a99f1f90cd505ecaeb72ca5001e523b 100644
--- a/src/methods/lsh/lsh.pl
+++ b/src/methods/lsh/lsh.pl
@@ -1,8 +1,8 @@
 
-:- module(lsh, [initAndTrainModel/7,
-                computeRecall/5,
-                searchWithQuery/9,
-                searchNoQuery/7]).
+:- module(lsh, [lsh_initAndTrainModel/7,
+                lsh_computeRecall/5,
+                lsh_searchWithQuery/9,
+                lsh_searchNoQuery/7]).
 
 %% requirements of library(struct)
 :- load_files(library(str_decl),
@@ -33,7 +33,7 @@
 %% --Description--
 %%              Initiatzes the model and trains it.
 %%
-initAndTrainModel(ReferenceList, ReferenceRows, NumProj, NumTables, HashWidth, SecondHashSize, BucketSize) :-
+lsh_initAndTrainModel(ReferenceList, ReferenceRows, NumProj, NumTables, HashWidth, SecondHashSize, BucketSize) :-
         NumProj >= 0,
         NumTables >= 0,
         HashWidth >= 0.0,
@@ -56,7 +56,7 @@ foreign(initAndTrainModel, c, initAndTrainModelI(+pointer(float_array), +integer
 %% --Description--
 %%              Compute the recall (% of neighbors found) given the neighbors returned by searchWithQuery/12 or searchNoQuery/9 and a "ground truth" set of neighbors.
 %%
-computeRecall(FoundNeighborsList, FoundNeighborsRows, RealNeighborsList, RealNeighborsRows, Percentage) :-
+lsh_computeRecall(FoundNeighborsList, FoundNeighborsRows, RealNeighborsList, RealNeighborsRows, Percentage) :-
         convert_list_to_float_array(FoundNeighborsList, FoundNeighborsRows, array(Xsize, Xrownum, X)),
         convert_list_to_float_array(RealNeighborsList, RealNeighborsRows, array(Ysize, Yrownum, Y)),
         computeRecallI(X, Xsize, Xrownum, Y, Ysize, Yrownum, Percentage).
@@ -80,7 +80,7 @@ foreign(computeRecall, c, computeRecallI(+pointer(float_array), +integer, +integ
 %%              Compute the nearest neighbors of the points in the given query set and store the output in the given matrices.
 %%              The matrices will be set to the size of n columns by k rows, where n is the number of points in the query dataset and k is the number of neighbors being searched for.
 %%
-searchWithQuery(QueryList, QueryRows, K, ResultingNeighborsList, YCols, DistancesList, ZCols, NumTablesToSearch, T) :-
+lsh_searchWithQuery(QueryList, QueryRows, K, ResultingNeighborsList, YCols, DistancesList, ZCols, NumTablesToSearch, T) :-
         K > 0,
         NumTablesToSearch >= 0,
         T >= 0,
@@ -109,7 +109,7 @@ foreign(searchWithQuery, c, searchWithQueryI(   +pointer(float_array), +integer,
 %%              Compute the nearest neighbors and store the output in the given matrices.
 %%              The matrices will be set to the size of n columns by k rows, where n is the number of points in the query dataset and k is the number of neighbors being searched for.
 %%
-searchNoQuery(K, ResultingNeighborsList, YCols, DistancesList, ZCols, NumTablesToSearch, T) :-
+lsh_searchNoQuery(K, ResultingNeighborsList, YCols, DistancesList, ZCols, NumTablesToSearch, T) :-
         K > 0,
         NumTablesToSearch >= 0,
         T >= 0,
diff --git a/src/methods/lsh/lsh_test.pl b/src/methods/lsh/lsh_test.pl
index d5088a5bea02a75fae36f73466a1e8a98d343d81..5d3a0cc4c08d6bcd64226d2b5ee0413c15e6401a 100644
--- a/src/methods/lsh/lsh_test.pl
+++ b/src/methods/lsh/lsh_test.pl
@@ -7,146 +7,146 @@
 :- use_module('../../helper_files/helper.pl').
 
 reset_Model :-
-        initAndTrainModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, 20, 10, 0.0, 99901, 500).
+        lsh_initAndTrainModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, 20, 10, 0.0, 99901, 500).
 
 %%
-%% TESTING predicate initAndTrainModel/7
+%% TESTING predicate lsh_initAndTrainModel/7
 %%
-:- begin_tests(initAndTrainModel).      
+:- begin_tests(lsh_initAndTrainModel).      
 
 %% Failure Tests
                                             
 
 test(lsh_InitAndTrainModel_Negative_NumProj, fail) :-
-        initAndTrainModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, -20, 10, 0.0, 99901, 500).
+        lsh_initAndTrainModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, -20, 10, 0.0, 99901, 500).
 
 test(lsh_InitAndTrainModel_Negative_NumTables, fail) :-
-        initAndTrainModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, 20, -10, 0.0, 99901, 500).
+        lsh_initAndTrainModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, 20, -10, 0.0, 99901, 500).
 
 test(lsh_InitAndTrainModel_Negative_HashWidth, fail) :-
-        initAndTrainModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, 20, 10, -1.0, 99901, 500).
+        lsh_initAndTrainModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, 20, 10, -1.0, 99901, 500).
 
 test(lsh_InitAndTrainModel_Negative_SecondHashSize, fail) :-
-        initAndTrainModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, 20, 10, 0.0, -99901, 500).
+        lsh_initAndTrainModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, 20, 10, 0.0, -99901, 500).
 
 test(lsh_InitAndTrainModel_Negative_BucketSize, fail) :-
-        initAndTrainModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, 20, 10, 0.0, 99901, -500).
+        lsh_initAndTrainModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, 20, 10, 0.0, 99901, -500).
         
 
 %% Successful Tests
 
 test(lsh_InitAndTrainModel_Normal_Use) :-
-        initAndTrainModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, 20, 10, 0.0, 99901, 500).
+        lsh_initAndTrainModel([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, 20, 10, 0.0, 99901, 500).
 
 test(lsh_InitAndTrainModel_CSV_Input) :-
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        train(Data, 4, 25, 15, 1.5, 99901, 200).
+        lsh_initAndTrainModel(Data, 4, 25, 15, 1.5, 99901, 200).
 
-:- end_tests(initAndTrainModel).
+:- end_tests(lsh_initAndTrainModel).
 
 
 
 %%
-%% TESTING predicate computeRecall/5
+%% TESTING predicate lsh_computeRecall/5
 %%
-:- begin_tests(computeRecall).      
+:- begin_tests(lsh_computeRecall).      
 
 %% Failure Tests
                                             
 test(lsh_ComputeRecall_Wrong_Dimensions, [error(_, system_error('Error'))]) :-
         reset_Model,
-        computeRecall([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, _).
+        lsh_computeRecall([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, _).
         
 
 %% Successful Tests
 
 test(lsh_ComputeRecall_Normal_Use) :-
         reset_Model,
-        computeRecall([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, _).
+        lsh_computeRecall([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, _).
 
 
-:- end_tests(computeRecall).
+:- end_tests(lsh_computeRecall).
 
 
 
 %%
-%% TESTING predicate searchWithQuery/10
+%% TESTING predicate lsh_searchWithQuery/9
 %%
-:- begin_tests(searchWithQuery).      
+:- begin_tests(lsh_searchWithQuery).      
 
 %% Failure Tests
                                             
 test(lsh_SearchWithQuery_Wrong_Dimensions, [error(_, system_error('Error'))]) :-
         reset_Model,
-        searchWithQuery([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, 3, _, _, _, _, 0, 0).
+        lsh_searchWithQuery([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, 3, _, _, _, _, 0, 0).
 
 test(lsh_SearchWithQuery_Too_High_K, [error(_, system_error('Error'))]) :-
         reset_Model,
-        searchWithQuery([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, 10, _, _, _, _, 0, 0).
+        lsh_searchWithQuery([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, 10, _, _, _, _, 0, 0).
 
 test(lsh_SearchWithQuery_Negative_K, fail) :-
         reset_Model,
-        searchWithQuery([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, -3, _, _, _, _, 0, 0).
+        lsh_searchWithQuery([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, -3, _, _, _, _, 0, 0).
 
 test(lsh_SearchWithQuery_Negative_NumTablesToSearch, fail) :-
         reset_Model,
-        searchWithQuery([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, 3, _, _, _, _, -10, 0).
+        lsh_searchWithQuery([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, 3, _, _, _, _, -10, 0).
 
 test(lsh_SearchWithQuery_Negative_T, fail) :-
         reset_Model,
-        searchWithQuery([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, 3, _, _, _, _, 0, -10).
+        lsh_searchWithQuery([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, 3, _, _, _, _, 0, -10).
         
 
 %% Successful Tests
 
 test(lsh_SearchWithQuery_Normal_Use) :-
         reset_Model,
-        searchWithQuery([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, 3, NeighborResultsList, _, DistancesList, _, 0, 0),
+        lsh_searchWithQuery([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, 3, NeighborResultsList, _, DistancesList, _, 0, 0),
         print('\nNeighborResults: '),
         print(NeighborResultsList),
         print('\nDistances: '),
         print(DistancesList).
 
-:- end_tests(searchWithQuery).
+:- end_tests(lsh_searchWithQuery).
 
 
 
 %%
-%% TESTING predicate searchNoQuery/10
+%% TESTING predicate lsh_searchNoQuery/7
 %%
-:- begin_tests(searchNoQuery).      
+:- begin_tests(lsh_searchNoQuery).      
 
 %% Failure Tests
 
 test(lsh_SearchNoQuery_Too_High_K, [error(_, system_error('Error'))]) :-
         reset_Model,
-        searchNoQuery(10, _, _, _, _, 0, 0).
+        lsh_searchNoQuery(10, _, _, _, _, 0, 0).
 
 test(lsh_SearchNoQuery_Negative_K, fail) :-
         reset_Model,
-        searchNoQuery(-3, _, _, _, _, 0, 0).
+        lsh_searchNoQuery(-3, _, _, _, _, 0, 0).
 
 test(lsh_SearchNoQuery_Negative_NumTablesToSearch, fail) :-
         reset_Model,
-        searchNoQuery(3, _, _, _, _, -10, 0).
+        lsh_searchNoQuery(3, _, _, _, _, -10, 0).
 
 test(lsh_SearchNoQuery_Negative_T, fail) :-
         reset_Model,
-        searchNoQuery(3, _, _, _, _, 0, -10).
+        lsh_searchNoQuery(3, _, _, _, _, 0, -10).
         
 
 %% Successful Tests
 
 test(lsh_SearchNoQuery_Normal_Use) :-
         reset_Model,
-        searchNoQuery(3, NeighborResultsList, _, DistancesList, _, 0, 0),
+        lsh_searchNoQuery(3, NeighborResultsList, _, DistancesList, _, 0, 0),
         print('\nNeighborResults: '),
         print(NeighborResultsList),
         print('\nDistances: '),
         print(DistancesList).
 
-:- end_tests(searchNoQuery).
+:- end_tests(lsh_searchNoQuery).
 
 run_lsh_tests :-
         run_tests.
diff --git a/src/methods/naive_bayes_classifier/naive_bayes_classifier.pl b/src/methods/naive_bayes_classifier/naive_bayes_classifier.pl
index 898624de6e4b93cecc2587118f378d89772b58e3..ea840a47c57cbd13636622eae2544ea90b96b094 100644
--- a/src/methods/naive_bayes_classifier/naive_bayes_classifier.pl
+++ b/src/methods/naive_bayes_classifier/naive_bayes_classifier.pl
@@ -1,13 +1,13 @@
 
-:- module(naive_bayes_classifier, [     initModelWithTrain/6,
-                                        initModelNoTrain/3,
+:- module(naive_bayes_classifier, [     nbc_initModelWithTrain/6,
+                                        nbc_initModelNoTrain/3,
                                           
-                                        classifyMatrix/5,
-                                        means/2,
-                                        probabilities/2,
-                                        trainMatrix/5,
-                                        trainPoint/2,
-                                        variances/2]).
+                                        nbc_classifyMatrix/5,
+                                        nbc_means/2,
+                                        nbc_probabilities/2,
+                                        nbc_trainMatrix/5,
+                                        nbc_trainPoint/2,
+                                        nbc_variances/2]).
 
 %% requirements of library(struct)
 :- load_files(library(str_decl),
@@ -37,7 +37,7 @@
 %% --Description--
 %%              Initializes the classifier as per the input and then trains it by calculating the sample mean and variances.
 %%
-initModelWithTrain(DataList, DataRows, LabelsList, NumClasses, IncrementalVar, Epsilon) :-
+nbc_initModelWithTrain(DataList, DataRows, LabelsList, NumClasses, IncrementalVar, Epsilon) :-
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)),
         convert_list_to_float_array(LabelsList, array(Ysize, Y)),
         initModelWithTrainI(X, Xsize, Xrownum, Y, Ysize, NumClasses, IncrementalVar, Epsilon).
@@ -60,7 +60,7 @@ foreign(initModelWithTrain, c, initModelWithTrainI(     +pointer(float_array), +
 %%              Initialize the Naive Bayes classifier without performing training.
 %%              All of the parameters of the model will be initialized to zero. Be sure to use train before calling classify, otherwise the results may be meaningless.
 %%
-initModelNoTrain(NumClasses, IncrementalVar, Epsilon) :-
+nbc_initModelNoTrain(NumClasses, IncrementalVar, Epsilon) :-
         NumClasses >= 0,
         Epsilon >= 0,
         initModelNoTrainI(NumClasses, IncrementalVar, Epsilon).
@@ -95,7 +95,7 @@ foreign(initModelNoTrain, c, initModelNoTrainI( +integer,
 %%              Classify the given points using the trained NaiveBayesClassifier model and also return estimates of the probabilities for each class in the given matrix.
 %%              The predicted labels for each point are stored in the given vector.
 %%
-classifyMatrix(DataList, DataRows, PredictionList, ProbsList, ZCols) :-
+nbc_classifyMatrix(DataList, DataRows, PredictionList, ProbsList, ZCols) :-
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrows, X)),
         classifyMatrixI(X, Xsize, Xrows, Y, Ysize, Z, ZCols, ZRows),
         convert_float_array_to_list(Y, Ysize, PredictionList),
@@ -114,7 +114,7 @@ foreign(classifyMatrix, c, classifyMatrixI(     +pointer(float_array), +integer,
 %% --Description--
 %%              Get the sample means for each class.
 %%
-means(MeansList, XCols) :-
+nbc_means(MeansList, XCols) :-
         meansI(X, XCols, XRows),
         convert_float_array_to_2d_list(X, XCols, XRows, MeansList).
 
@@ -129,7 +129,7 @@ foreign(means, c, meansI(-pointer(float_array), -integer, -integer)).
 %% --Description--
 %%              Get the sample probabilities for each class.
 %%
-probabilities(ProbabilitiesList, XCols) :-
+nbc_probabilities(ProbabilitiesList, XCols) :-
         probabilitiesI(X, XCols, XRows),
         convert_float_array_to_2d_list(X, XCols, XRows, ProbabilitiesList).
 
@@ -147,7 +147,7 @@ foreign(probabilities, c, probabilitiesI(-pointer(float_array), -integer, -integ
 %% --Description--
 %%              Train the Naive Bayes classifier on the given dataset. The data must be the same dimensionality as the existing model parameters.
 %%
-trainMatrix(DataList, DataRows, LabelsList, NumClasses, IncrementalVar) :-
+nbc_trainMatrix(DataList, DataRows, LabelsList, NumClasses, IncrementalVar) :-
         NumClasses >= 0,
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)),
         convert_list_to_float_array(LabelsList, array(Ysize, Y)),
@@ -169,7 +169,7 @@ foreign(trainMatrix, c, trainMatrixI(   +pointer(float_array), +integer, +intege
 %%              Train the Naive Bayes classifier on the given point.
 %%              This will use the incremental algorithm for updating the model parameters. The data must be the same dimensionality as the existing model parameters.
 %%
-trainPoint(DataList, Label) :-
+nbc_trainPoint(DataList, Label) :-
         convert_list_to_float_array(DataList, array(Xsize, X)),
         trainPointI(X, Xsize, Label).
         
@@ -185,7 +185,7 @@ foreign(trainPoint, c, trainPointI(     +pointer(float_array), +integer,
 %% --Description--
 %%              Get the sample variances for each class.
 %%
-variances(VariancesList, XCols) :-
+nbc_variances(VariancesList, XCols) :-
         variancesI(X, XCols, XRows),
         convert_float_array_to_2d_list(X, XCols, XRows, VariancesList).
 
diff --git a/src/methods/naive_bayes_classifier/naive_bayes_classifier_test.pl b/src/methods/naive_bayes_classifier/naive_bayes_classifier_test.pl
index 8bc7b33a067a7386a8fef899400b3994fb7e9776..541088cb90c25ba3c1889fb35f48380ffb99ebbb 100644
--- a/src/methods/naive_bayes_classifier/naive_bayes_classifier_test.pl
+++ b/src/methods/naive_bayes_classifier/naive_bayes_classifier_test.pl
@@ -7,92 +7,92 @@
 :- use_module('../../helper_files/helper.pl').
 
 reset_Model_NoTrain :-
-        initModelNoTrain(2, 0, 0.000001).
+        nbc_initModelNoTrain(2, 0, 0.000001).
 
 reset_Model_WithTrain :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.000001).
+        nbc_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.000001).
 
 %%
-%% TESTING predicate initModelNoTrain/3
+%% TESTING predicate nbc_initModelNoTrain/3
 %%
-:- begin_tests(initModelNoTrain).      
+:- begin_tests(nbc_initModelNoTrain).      
 
 %% Failure Tests
                                             
 test(nbc_InitModelNoTrain_Negative_NumClasses, fail) :-
-        initModelNoTrain(-2, 0, 0.000001).
+        nbc_initModelNoTrain(-2, 0, 0.000001).
 
 test(nbc_InitModelNoTrain_Negative_Epsilon, fail) :-
-        initModelNoTrain(2, 0, -0.0001).
+        nbc_initModelNoTrain(2, 0, -0.0001).
         
 
 %% Successful Tests
 
 test(nbc_InitModelNoTrain_Normal_Use) :-
-        initModelNoTrain(2, 0, 0.000001).
+        nbc_initModelNoTrain(2, 0, 0.000001).
 
-:- end_tests(initModelNoTrain).
+:- end_tests(nbc_initModelNoTrain).
 
 
 
 %%
-%% TESTING predicate initModelWithTrain/6
+%% TESTING predicate nbc_initModelWithTrain/6
 %%
-:- begin_tests(initModelWithTrain).      
+:- begin_tests(nbc_initModelWithTrain).      
 
 %% Failure Tests
                                             
 test(nbc_InitModelWithTrain_Negative_NumClasses, fail) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], -2, 0, 0.000001).
+        nbc_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], -2, 0, 0.000001).
 
 test(nbc_InitModelWithTrain_Negative_Epsilon, fail) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, -0.000001).
+        nbc_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, -0.000001).
 
 test(nbc_InitModelWithTrain_Too_Short_Label, [error(_,system_error('Error'))]) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], 2, 0, 0.000001).
+        nbc_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], 2, 0, 0.000001).
 
 test(nbc_InitModelWithTrain_Too_Long_Label, [error(_,system_error('Error'))]) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1,0,1], 2, 0, 0.000001).
+        nbc_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1,0,1], 2, 0, 0.000001).
 
 test(nbc_InitModelWithTrain_Too_Many_Label_Classes, [error(_,system_error('Error'))]) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,2,3], 2, 0, 0.000001).
+        nbc_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,2,3], 2, 0, 0.000001).
         
 
 %% Successful Tests
 
 test(nbc_InitModelWithTrain_Normal_Use) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.000001).
+        nbc_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0, 0.000001).
 
 test(nbc_InitModelWithTrain_CSV_Input) :-
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        initModelWithTrain(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 1, 0.0042).
+        nbc_initModelWithTrain(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 1, 0.0042).
 
-:- end_tests(initModelWithTrain).
+:- end_tests(nbc_initModelWithTrain).
 
 
 
 %%
-%% TESTING predicate classifyMatrix/5
+%% TESTING predicate nbc_classifyMatrix/5
 %%
-:- begin_tests(classifyMatrix).      
+:- begin_tests(nbc_classifyMatrix).      
 
 %% Failure Tests
 
 test(nbc_ClassifyMatrix_Before_Train, [error(_,system_error('Error'))]) :-
         reset_Model_NoTrain,
-        classifyMatrix([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, _, _, _).
+        nbc_classifyMatrix([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, _, _, _).
                          
 test(nbc_ClassifyMatrix_Diffrent_Dims_To_Train, [error(_,system_error('Error'))]) :-
         reset_Model_WithTrain,
-        classifyMatrix([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, _, _, _).
+        nbc_classifyMatrix([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, _, _, _).
         
 
 %% Successful Tests
 
 test(nbc_ClassifyMatrix_Normal_Use) :-
         reset_Model_WithTrain,
-        classifyMatrix([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, PredictionList, _, ProbabilitiesList),
+        nbc_classifyMatrix([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, PredictionList, _, ProbabilitiesList),
         print('\nPredictions: '),
         print(PredictionList),
         print('\nProbabilities: '),
@@ -102,25 +102,25 @@ test(nbc_ClassifyMatrix_CSV_Input) :-
         reset_Model_WithTrain,
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        classifyMatrix(Data, 4, PredictionList, _, ProbabilitiesList),
+        nbc_classifyMatrix(Data, 4, PredictionList, _, ProbabilitiesList),
         print('\nPredictions: '),
         print(PredictionList),
         print('\nProbabilities: '),
         print(ProbabilitiesList).
 
-:- end_tests(classifyMatrix).
+:- end_tests(nbc_classifyMatrix).
 
 
 
 %%
-%% TESTING predicate means/2
+%% TESTING predicate nbc_means/2
 %%
-:- begin_tests(means).      
+:- begin_tests(nbc_means).      
 
 %% Failure Tests
 test(nbc_Means_Before_Train, [error(_,system_error('Error'))]) :-
         reset_Model_NoTrain,
-        means(MeansList, _),
+        nbc_means(MeansList, _),
         print('\nMeans: '),
         print(MeansList).
 
@@ -129,23 +129,23 @@ test(nbc_Means_Before_Train, [error(_,system_error('Error'))]) :-
 
 test(nbc_Means_Normal_Use) :-
         reset_Model_WithTrain,
-        means(MeansList, _),
+        nbc_means(MeansList, _),
         print('\nMeans: '),
         print(MeansList).
 
-:- end_tests(means).
+:- end_tests(nbc_means).
 
 
 
 %%
-%% TESTING predicate probabilities/2
+%% TESTING predicate nbc_probabilities/2
 %%
-:- begin_tests(probabilities).      
+:- begin_tests(nbc_probabilities).      
 
 %% Failure Tests
 test(nbc_Probabilities_Before_Train, [error(_,system_error('Error'))]) :-
         reset_Model_NoTrain,
-        probabilities(ProbabilitiesList, _),
+        nbc_probabilities(ProbabilitiesList, _),
         print('\nProbabilities: '),
         print(ProbabilitiesList).
 
@@ -154,46 +154,46 @@ test(nbc_Probabilities_Before_Train, [error(_,system_error('Error'))]) :-
 
 test(nbc_Probabilities_Normal_Use) :-
         reset_Model_WithTrain,
-        probabilities(ProbabilitiesList, _),
+        nbc_probabilities(ProbabilitiesList, _),
         print('\nProbabilities: '),
         print(ProbabilitiesList).
 
-:- end_tests(probabilities).
+:- end_tests(nbc_probabilities).
 
 
 
 %%
-%% TESTING predicate trainMatrix/5
+%% TESTING predicate nbc_trainMatrix/5
 %%
-:- begin_tests(trainMatrix).      
+:- begin_tests(nbc_trainMatrix).      
 
 %% Failure Tests
                                             
 test(nbc_TrainMatrix_Negative_NumClasses, fail) :-
         reset_Model_NoTrain,
-        trainMatrix([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], -2, 0).
+        nbc_trainMatrix([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], -2, 0).
 
 test(nbc_TrainMatrix_Too_Short_Label, [error(_,system_error('The number of data points does not match the number of labels!'))]) :-
         reset_Model_NoTrain,
-        trainMatrix([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], 2, 0).
+        nbc_trainMatrix([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], 2, 0).
 
 test(nbc_TrainMatrix_Too_Long_Label, [error(_,system_error('The number of data points does not match the number of labels!'))]) :-
         reset_Model_NoTrain,
-        trainMatrix([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1,0,1], 2, 0).
+        nbc_trainMatrix([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1,0,1], 2, 0).
 
 test(nbc_TrainMatrix_Too_Many_Label_Classes, [error(_,system_error('The given Labels dont fit the format [0,Numclasses-1]!'))]) :-
         reset_Model_NoTrain,
-        trainMatrix([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,2,3], 2, 0).
+        nbc_trainMatrix([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,2,3], 2, 0).
         
 test(nbc_TrainMatrix_After_InitTrain, [error(_,system_error('addition: incompatible matrix dimensions: 3x1 and 4x1'))]) :-
         reset_Model_WithTrain,
-        trainMatrix([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, [0,1,0], 2, 0).
+        nbc_trainMatrix([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, [0,1,0], 2, 0).
 
 %% Successful Tests
 
 test(nbc_TrainMatrix_Normal_Use) :-
         reset_Model_NoTrain,
-        trainMatrix([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0).
+        nbc_trainMatrix([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0).
 
 
 
@@ -201,51 +201,51 @@ test(nbc_TrainMatrix_CSV_Input) :-
         reset_Model_NoTrain,
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        trainMatrix(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0).
+        nbc_trainMatrix(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0).
 
-:- end_tests(trainMatrix).
+:- end_tests(nbc_trainMatrix).
 
 
 
 %%
-%% TESTING predicate trainPoint/5
+%% TESTING predicate nbc_trainPoint/5
 %%
-:- begin_tests(trainPoint).      
+:- begin_tests(nbc_trainPoint).      
 
 %% Failure Tests
 
 test(nbc_TrainPoint_Bad_Label, [error(_,system_error('Error'))]) :-
         reset_Model_NoTrain,
-        trainPoint([5.1,3.5,1.4], -5).
+        nbc_trainPoint([5.1,3.5,1.4], -5).
 
 test(nbc_TrainPoint_Too_Long_Point, [error(_,system_error('Error'))]) :-
         reset_Model_WithTrain,
-        trainPoint([5.1,3.5,1.4,3.5], 0).
+        nbc_trainPoint([5.1,3.5,1.4,3.5], 0).
         
 
 %% Successful Tests
 
 %%test(nbc_TrainPoint_Normal_Use) :-
 %%        reset_Model_NoTrain,
-%%        trainPoint([5.1,3.5,1.4], 1).
+%%        nbc_trainPoint([5.1,3.5,1.4], 1).
 
 test(nbc_TrainPoint_After_InitTrain) :-
         reset_Model_WithTrain,
-        trainPoint([5.1,3.5,1.4], 1).
+        nbc_trainPoint([5.1,3.5,1.4], 1).
 
-:- end_tests(trainPoint).
+:- end_tests(nbc_trainPoint).
 
 
 
 %%
-%% TESTING predicate variances/2
+%% TESTING predicate nbc_variances/2
 %%
-:- begin_tests(variances).      
+:- begin_tests(nbc_variances).      
 
 %% Failure Tests
 test(nbc_Variances_Before_Train, [error(_,system_error('Error'))]) :-
         reset_Model_NoTrain,
-        variances(VariancesList, _),
+        nbc_variances(VariancesList, _),
         print('\nVariances: '),
         print(VariancesList).
 
@@ -254,11 +254,11 @@ test(nbc_Variances_Before_Train, [error(_,system_error('Error'))]) :-
 
 test(nbc_Variances_Normal_Use) :-
         reset_Model_WithTrain,
-        variances(VariancesList, _),
+        nbc_variances(VariancesList, _),
         print('\nVariances: '),
         print(VariancesList).
 
-:- end_tests(variances).
+:- end_tests(nbc_variances).
 
 run_naive_bayes_classifier_tests :-
         run_tests.
diff --git a/src/methods/perceptron/perceptron.pl b/src/methods/perceptron/perceptron.pl
index b71547e9bad1b18e216e4404cdfd4ff0d2f1b9b3..f829b50b4bb6912e84e44d16c0ca97b610930daa 100644
--- a/src/methods/perceptron/perceptron.pl
+++ b/src/methods/perceptron/perceptron.pl
@@ -1,10 +1,10 @@
 
-:- module(perceptron, [ initModelNoTrain/3,
-                        initModelWithTrain/5,
-                        biases/1,
-                        classify/3,
-                        train/5,
-                        weights/2]).
+:- module(perceptron, [ perceptron_initModelNoTrain/3,
+                        perceptron_initModelWithTrain/5,
+                        perceptron_biases/1,
+                        perceptron_classify/3,
+                        perceptron_train/5,
+                        perceptron_weights/2]).
 
 %% requirements of library(struct)
 :- load_files(library(str_decl),
@@ -31,7 +31,7 @@
 %% --Description-- 
 %%              Initilizes the perceptron model and its weight matrix but doesnt train it.
 %%
-initModelNoTrain(NumClasses, Dimensionality, MaxIterations) :-
+perceptron_initModelNoTrain(NumClasses, Dimensionality, MaxIterations) :-
         NumClasses >= 0,
         Dimensionality > 0,
         MaxIterations >= 0,
@@ -50,7 +50,7 @@ foreign(initModelNoTrain, c, initModelNoTrainI(+integer, +integer, +integer)).
 %% --Description-- 
 %%              Initilizes the perceptron model and its weight matrix and trains it with the given data.
 %%
-initModelWithTrain(DataList, DataRows, LabelsList, NumClasses, MaxIterations) :-
+perceptron_initModelWithTrain(DataList, DataRows, LabelsList, NumClasses, MaxIterations) :-
         NumClasses >= 0,
         MaxIterations >= 0,
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)),
@@ -70,7 +70,7 @@ foreign(initModelWithTrain, c, initModelWithTrainI(     +pointer(float_array), +
 %% --Description-- 
 %%              Get the biases.
 %%
-biases(BiasesList) :-
+perceptron_biases(BiasesList) :-
         biasesI(Y, Ysize),
         convert_float_array_to_list(Y, Ysize, BiasesList).
 
@@ -85,7 +85,7 @@ foreign(biases, c, biasesI(-pointer(float_array), -integer)).
 %% --Description-- 
 %%              After training, use the weights matrix to classify test, and put the predicted classes in predictedLabels.
 %%
-classify(TestList, TestRows, PredictLabelList) :-
+perceptron_classify(TestList, TestRows, PredictLabelList) :-
         convert_list_to_float_array(TestList, TestRows, array(Xsize, Xrows, X)),
         classifyI(X, Xsize, Xrows, Y, Ysize),
         convert_float_array_to_list(Y, Ysize, PredictLabelList).
@@ -104,7 +104,7 @@ foreign(classify, c, classifyI( +pointer(float_array), +integer, +integer,
 %% --Description--
 %%              Train the perceptron on the given data for up to the maximum number of iterations. This training does not reset the model weights, so you can call train/8 on multiple datasets sequentially.
 %%
-train(DataList, DataRows, LabelsList, NumClasses, WeightsList) :-
+perceptron_train(DataList, DataRows, LabelsList, NumClasses, WeightsList) :-
         NumClasses >= 0,
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)),
         convert_list_to_float_array(LabelsList, array(Ysize, Y)),
@@ -125,7 +125,7 @@ foreign(train, c, trainI(       +pointer(float_array), +integer, +integer,
 %% --Description--
 %%              Get the weight matrix.
 %%
-weights(WeightsList, XCols) :-
+perceptron_weights(WeightsList, XCols) :-
         weightsI(X, XCols, XRows),
         convert_float_array_to_2d_list(X, XCols, XRows, WeightsList).
 
diff --git a/src/methods/perceptron/perceptron_test.pl b/src/methods/perceptron/perceptron_test.pl
index bb8d3db9f922f434cde7c468fea7b9b5ca81b6d6..bd77ad9516396237224c309b45c16e70d18388a5 100644
--- a/src/methods/perceptron/perceptron_test.pl
+++ b/src/methods/perceptron/perceptron_test.pl
@@ -7,92 +7,92 @@
 :- use_module('../../helper_files/helper.pl').
 
 reset_Model_NoTrain :-
-        initModelNoTrain(2, 3, 1000).
+        perceptron_initModelNoTrain(2, 3, 1000).
 
 reset_Model_WithTrain :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 1000).
+        perceptron_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 1000).
 
 
 
 %%
-%% TESTING predicate initModelNoTrain/3
+%% TESTING predicate perceptron_initModelNoTrain/3
 %%
-:- begin_tests(initModelNoTrain).      
+:- begin_tests(perceptron_initModelNoTrain).      
 
 %% Failure Tests
                                             
 test(perceptron_InitModelNoTrain_Negative_NumClasses, fail) :-
-        initModelNoTrain(-2, 3, 1000).
+        perceptron_initModelNoTrain(-2, 3, 1000).
 
 test(perceptron_InitModelNoTrain_Negative_Dimensionality, fail) :-
-        initModelNoTrain(2, -3, 1000).
+        perceptron_initModelNoTrain(2, -3, 1000).
 
 test(perceptron_InitModelNoTrain_Negative_MaxIterations, fail) :-
-        initModelNoTrain(2, 3, -1000).
+        perceptron_initModelNoTrain(2, 3, -1000).
         
 
 %% Successful Tests
 
 test(perceptron_InitModelNoTrain_Normal_Use) :-
-        initModelNoTrain(2, 3, 1000).
+        perceptron_initModelNoTrain(2, 3, 1000).
 
 test(perceptron_InitModelNoTrain_Alternative_Input) :-
-        initModelNoTrain(0, 1, 1000).
+        perceptron_initModelNoTrain(0, 1, 1000).
 
-:- end_tests(initModelNoTrain).
+:- end_tests(perceptron_initModelNoTrain).
 
 
 
 %%
-%% TESTING predicate initModelWithTrain/5
+%% TESTING predicate perceptron_initModelWithTrain/5
 %%
-:- begin_tests(initModelWithTrain).      
+:- begin_tests(perceptron_initModelWithTrain).      
 
 %% Failure Tests
 
 test(perceptron_InitModelWithTrain_Negative_NumClasses, fail) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], -2, 1000).
+        perceptron_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], -2, 1000).
 
 
 test(perceptron_InitModelWithTrain_Negative_MaxIterations, fail) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, -1000).
+        perceptron_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, -1000).
                                             
 
 test(random_forest_InitModelWithTrainNoWeights_Too_Short_Label, [error(_,system_error('The number of data points does not match the number of labels!'))]) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], 2, 1000).
+        perceptron_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], 2, 1000).
 
 test(random_forest_InitModelWithTrainNoWeights_Too_Long_Label, [error(_,system_error('The number of data points does not match the number of labels!'))]) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1,0,1], 2, 1000).
+        perceptron_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1,0,1], 2, 1000).
 
 test(random_forest_InitModelWithTrainNoWeights_Too_Many_Label_Classes, [error(_,system_error('The given Labels dont fit the format [0,Numclasses-1]!'))]) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,2,3], 2, 1000).
+        perceptron_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,2,3], 2, 1000).
         
 
 %% Successful Tests
 
 test(perceptron_InitModelWithTrain_Normal_Use) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 1000).
+        perceptron_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 1000).
 
 test(perceptron_InitModelWithTrain_CSV_Input) :-
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        initModelWithTrain(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 1000).
+        perceptron_initModelWithTrain(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 1000).
 
-:- end_tests(initModelWithTrain).
+:- end_tests(perceptron_initModelWithTrain).
 
 
 
 %%
-%% TESTING predicate biases/1
+%% TESTING predicate perceptron_biases/1
 %%
-:- begin_tests(biases).      
+:- begin_tests(perceptron_biases).      
 
 %% Failure Tests
 
 %% Doesnt cause an Error                            
 test(perceptron_Biases_Before_Train) :-
         reset_Model_NoTrain,
-        biases(Biases),
+        perceptron_biases(Biases),
         print('\nBiases: '),
         print(Biases).
         
@@ -101,107 +101,107 @@ test(perceptron_Biases_Before_Train) :-
 
 test(perceptron_Biases_AfterTrain) :-
         reset_Model_WithTrain,
-        biases(Biases),
+        perceptron_biases(Biases),
         print('\nBiases: '),
         print(Biases).
 
-:- end_tests(biases).
+:- end_tests(perceptron_biases).
 
 
 
 %%
-%% TESTING predicate classify/3
+%% TESTING predicate perceptron_classify/3
 %%
-:- begin_tests(classify).      
+:- begin_tests(perceptron_classify).      
 
 %% Failure Tests
 
 test(perceptron_Classify_Before_Train_Wrong_Dims, [error(_,system_error('matrix multiplication: incompatible matrix dimensions: 2x3 and 4x1'))]) :-
         reset_Model_NoTrain,
-        classify([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, _).
+        perceptron_classify([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, _).
 
 test(perceptron_Classify_Different_Dims_Than_Train, [error(_,system_error('matrix multiplication: incompatible matrix dimensions: 2x3 and 4x1'))]) :-
         reset_Model_WithTrain,
-        classify([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, _).
+        perceptron_classify([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, _).
         
 
 %% Successful Tests
 
 test(perceptron_Classify_Before_Train) :-
         reset_Model_NoTrain,
-        classify([1.0,2.0,3.0], 3, _).
+        perceptron_classify([1.0,2.0,3.0], 3, _).
 
 test(perceptron_Classify_Normal_Use) :-
         reset_Model_WithTrain,
-        classify([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, PredictList),
+        perceptron_classify([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, PredictList),
         print('\nPredicted Labels: '),
         print(PredictList).
 
-:- end_tests(classify).
+:- end_tests(perceptron_classify).
 
 
 
 %%
-%% TESTING predicate train/5
+%% TESTING predicate perceptron_train/5
 %%
-:- begin_tests(train).      
+:- begin_tests(perceptron_train).      
 
 %% Failure Tests
                                             
 test(perceptron_Train_Negaitve_NumClasses, fail) :-
         reset_Model_NoTrain,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], -2, [0.1,0.2,0.3,0.4]).
+        perceptron_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], -2, [0.1,0.2,0.3,0.4]).
 
 %% Seems to overide the dimensionality from reset_Model_NoTrain
 test(perceptron_Train_Too_Small_Data_Dims) :-
         reset_Model_NoTrain,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, [0,1,0], 2, [0.1,0.2,0.3]).
+        perceptron_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, [0,1,0], 2, [0.1,0.2,0.3]).
 
 
 test(perceptron_Train_Too_Short_Label, [error(_,system_error('The number of data points does not match the number of labels!'))]) :-
         reset_Model_NoTrain,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], 2, [0.1,0.2,0.3,0.4]).
+        perceptron_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], 2, [0.1,0.2,0.3,0.4]).
 
 test(perceptron_Train_Too_Long_Label, [error(_,system_error('The number of data points does not match the number of labels!'))]) :-
         reset_Model_NoTrain,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1,0,1], 2, [0.1,0.2,0.3,0.4]).
+        perceptron_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1,0,1], 2, [0.1,0.2,0.3,0.4]).
 
 test(perceptron_Train_Too_Many_Label_Classes, [error(_,system_error('The given Labels dont fit the format [0,Numclasses-1]!'))]) :-
         reset_Model_NoTrain,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,2,3], 2, [0.1,0.2,0.3,0.4]).
+        perceptron_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,2,3], 2, [0.1,0.2,0.3,0.4]).
 
 
 test(perceptron_Train_Too_Short_Weights, [error(_,system_error('The number of data points does not match the number of weights!'))]) :-
         reset_Model_NoTrain,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.1,0.2]).
+        perceptron_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.1,0.2]).
 
 test(perceptron_Train_Too_Long_Weights, [error(_,system_error('The number of data points does not match the number of weights!'))]) :-
         reset_Model_NoTrain,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.1,0.2,0.3,0.4,0.6,0.7]).
+        perceptron_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.1,0.2,0.3,0.4,0.6,0.7]).
 
 
 %% Successful Tests
 
 test(perceptron_Train_Normal_Use) :-
         reset_Model_NoTrain,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.1,0.2,0.3,0.4]).
+        perceptron_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.1,0.2,0.3,0.4]).
 
 
-:- end_tests(train).
+:- end_tests(perceptron_train).
 
 
 
 %%
-%% TESTING predicate weights/2
+%% TESTING predicate perceptron_weights/2
 %%
-:- begin_tests(weights).      
+:- begin_tests(perceptron_weights).      
 
 %% Failure Tests
 
 %% Doesnt cause an error                                          
 test(perceptron_Weights_Before_Train) :-
         reset_Model_NoTrain,
-        weights(Weights, _),
+        perceptron_weights(Weights, _),
         print('\nWeights: '),
         print(Weights).
         
@@ -210,11 +210,11 @@ test(perceptron_Weights_Before_Train) :-
 
 test(perceptron_Weights_AfterTrain) :-
         reset_Model_WithTrain,
-        weights(Weights, _),
+        perceptron_weights(Weights, _),
         print('\nWeights: '),
         print(Weights).
 
-:- end_tests(weights).
+:- end_tests(perceptron_weights).
 
 run_perceptron_tests :-
         run_tests.
diff --git a/src/methods/radical/radical.pl b/src/methods/radical/radical.pl
index b51f77c7cb258ba1a4c0774e1d5e362ea178b161..902e2b7ea0ccad16dce5ba3d6c175e8571776494 100644
--- a/src/methods/radical/radical.pl
+++ b/src/methods/radical/radical.pl
@@ -1,5 +1,5 @@
 
-:- module(radical, [    initModel/5,
+:- module(radical, [    radical_initModel/5,
                         doRadical/6,
                         doRadical2D/3]).
 
@@ -31,7 +31,7 @@
 %% --Description--
 %%              Initilizes the radical model.
 %%
-initModel(NoiseStdDev, Replicates, Angles, Sweeps, M) :-
+radical_initModel(NoiseStdDev, Replicates, Angles, Sweeps, M) :-
         NoiseStdDev >= 0,
         Replicates > 0,
         Angles > 0,
diff --git a/src/methods/radical/radical_test.pl b/src/methods/radical/radical_test.pl
index b915cbb79ef9988ebb7d21db1c590993d2baa72c..6fb60d37273d031bc8cd03601d777662620fd496 100644
--- a/src/methods/radical/radical_test.pl
+++ b/src/methods/radical/radical_test.pl
@@ -7,12 +7,12 @@
 :- use_module('../../helper_files/helper.pl').
 
 reset_Model :-
-        initModel(0.175,30,150,0,0).
+        radical_initModel(0.175,30,150,0,0).
 
 %%
-%% TESTING predicate initModel/5
+%% TESTING predicate radical_initModel/5
 %%
-:- begin_tests(initModel).      
+:- begin_tests(radical_initModel).      
 
 %% Failure Tests
 
@@ -29,24 +29,24 @@ test(radical_InitModel_DoRadical2D_No_Init) :-
         print(Result).
                                             
 test(radical_InitModel_Negative_NoiseStdDev, fail) :-
-        initModel(-0.175,30,150,0,0).
+        radical_initModel(-0.175,30,150,0,0).
 
 test(radical_InitModel_Negative_Replicates, fail) :-
-        initModel(0.175,-30,150,0,0).
+        radical_initModel(0.175,-30,150,0,0).
 
 test(radical_InitModel_Negative_Angles, fail) :-
-        initModel(0.175,30,-150,0,0).
+        radical_initModel(0.175,30,-150,0,0).
 
 test(radical_InitModel_Negative_Sweeps, fail) :-
-        initModel(0.175,30,150,-1,0).
+        radical_initModel(0.175,30,150,-1,0).
 
 
 %% Successful Tests
 
 test(radical_InitModel_Normal_Use) :-
-        initModel(0.175,30,150,0,0).
+        radical_initModel(0.175,30,150,0,0).
 
-:- end_tests(initModel).
+:- end_tests(radical_initModel).
 
 
 
diff --git a/src/methods/random_forest/random_forest.pl b/src/methods/random_forest/random_forest.pl
index b88d027883bfee2fd6ae87a3c4ac3a5436698cd3..c299e5096bad8dbe10fdb7bebba43a0b35f83b34 100644
--- a/src/methods/random_forest/random_forest.pl
+++ b/src/methods/random_forest/random_forest.pl
@@ -1,12 +1,12 @@
 
-:- module(random_forest, [      initModelNoTrain/0,
-                                initModelWithTrainNoWeights/8,
-                                initModelWithTrainWithWeights/9,
-                                classifyPoint/3,
-                                classifyMatrix/5,
-                                numTrees/1,
-                                trainNoWeights/9,
-                                trainWithWeights/10]).
+:- module(random_forest, [      random_forest_initModelNoTrain/0,
+                                random_forest_initModelWithTrainNoWeights/8,
+                                random_forest_initModelWithTrainWithWeights/9,
+                                random_forest_classifyPoint/3,
+                                random_forest_classifyMatrix/5,
+                                random_forest_numTrees/1,
+                                random_forest_trainNoWeights/9,
+                                random_forest_trainWithWeights/10]).
 
 %% requirements of library(struct)
 :- load_files(library(str_decl),
@@ -32,7 +32,7 @@
 %% --Description--
 %%              Initilizes the model without training it.
 %%
-initModelNoTrain :-
+random_forest_initModelNoTrain :-
         initModelNoTrainI(0).
 
 foreign(initModelNoTrain, c, initModelNoTrainI(+integer)).
@@ -52,7 +52,7 @@ foreign(initModelNoTrain, c, initModelNoTrainI(+integer)).
 %% --Description--
 %%              Initilizes the model and trains it but does not apply weights to it.
 %%
-initModelWithTrainNoWeights(DataList, DataRows, LabelsList, NumClasses, NumTrees, MinimumLeafSize, MinimumGainSplit, MaximumDepth) :-
+random_forest_initModelWithTrainNoWeights(DataList, DataRows, LabelsList, NumClasses, NumTrees, MinimumLeafSize, MinimumGainSplit, MaximumDepth) :-
         NumClasses >= 0,
         NumTrees > 0,
         MinimumLeafSize > 0,
@@ -83,7 +83,7 @@ foreign(initModelWithTrainNoWeights, c, initModelWithTrainNoWeightsI(
 %% --Description--
 %%              Initilizes the model, trains it and applies weights to it.
 %%
-initModelWithTrainWithWeights(DataList, DataRows, LabelsList, NumClasses, WeightsList, NumTrees, MinimumLeafSize, MinimumGainSplit, MaximumDepth) :-
+random_forest_initModelWithTrainWithWeights(DataList, DataRows, LabelsList, NumClasses, WeightsList, NumTrees, MinimumLeafSize, MinimumGainSplit, MaximumDepth) :-
         NumClasses >= 0,
         NumTrees > 0,
         MinimumLeafSize > 0,
@@ -113,7 +113,7 @@ foreign(initModelWithTrainWithWeights, c, initModelWithTrainWithWeightsI(
 %%              Predict the class of the given point and return the predicted class probabilities for each class.
 %%              Random forest has to be train before using this.
 %%
-classifyPoint(DataList, Prediction, AssignList) :-
+random_forest_classifyPoint(DataList, Prediction, AssignList) :-
         convert_list_to_float_array(DataList, array(Xsize, X)),
         classifyPointI(X, Xsize, Prediction, Y, Ysize),
         convert_float_array_to_list(Y, Ysize, AssignList).
@@ -134,7 +134,7 @@ foreign(classifyPoint, c, classifyPointI(       +pointer(float_array), +integer,
 %%              Predict the classes of each point in the given dataset, also returning the predicted class probabilities for each point.
 %%              Random forest has to be train before using this.
 %%
-classifyMatrix(DataList, DataRows, PredictionList, ProbsList, ZCols) :-
+random_forest_classifyMatrix(DataList, DataRows, PredictionList, ProbsList, ZCols) :-
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrows, X)),
         classifyMatrixI(X, Xsize, Xrows, Y, Ysize, Z, ZCols, ZRows),
         convert_float_array_to_list(Y, Ysize, PredictionList),
@@ -153,7 +153,7 @@ foreign(classifyMatrix, c, classifyMatrixI(     +pointer(float_array), +integer,
 %% --Description--
 %%              Get the number of trees in the forest.
 %%
-numTrees(NumTrees) :-
+random_forest_numTrees(NumTrees) :-
         numTreesI(NumTrees).
 foreign(numTrees, c, numTreesI([-integer])).
 
@@ -174,7 +174,7 @@ foreign(numTrees, c, numTreesI([-integer])).
 %%              Train the random forest on the given labeled training data with the given number of trees.
 %%              The minimumLeafSize and minimumGainSplit parameters are given to each individual decision tree during tree building.
 %%
-trainNoWeights(DataList, DataRows, LabelsList, NumClasses, NumTrees, MinimumLeafSize, MinimumGainSplit, MaximumDepth, Entropy) :-
+random_forest_trainNoWeights(DataList, DataRows, LabelsList, NumClasses, NumTrees, MinimumLeafSize, MinimumGainSplit, MaximumDepth, Entropy) :-
         NumClasses >= 0,
         NumTrees > 0,
         MinimumLeafSize > 0,
@@ -207,7 +207,7 @@ foreign(trainNoWeights, c, trainNoWeightsI(     +pointer(float_array), +integer,
 %%              Train the random forest on the given weighted labeled training data with the given number of trees.
 %%              The minimumLeafSize and minimumGainSplit parameters are given to each individual decision tree during tree building.
 %%
-trainWithWeights(DataList, DataRows, LabelsList, NumClasses, WeightsList, NumTrees, MinimumLeafSize, MinimumGainSplit, MaximumDepth, Entropy) :-
+random_forest_trainWithWeights(DataList, DataRows, LabelsList, NumClasses, WeightsList, NumTrees, MinimumLeafSize, MinimumGainSplit, MaximumDepth, Entropy) :-
         NumClasses >= 0,
         NumTrees > 0,
         MinimumLeafSize > 0,
diff --git a/src/methods/random_forest/random_forest_test.pl b/src/methods/random_forest/random_forest_test.pl
index ac6b4e1bddd63657154470110aabf8a079a580aa..249a498ba53a9b18a08eef3237c1f8792b4e8181 100644
--- a/src/methods/random_forest/random_forest_test.pl
+++ b/src/methods/random_forest/random_forest_test.pl
@@ -7,10 +7,10 @@
 :- use_module('../../helper_files/helper.pl').
 
 reset_Model_NoTrain :-
-        initModelNoTrain.
+        random_forest_initModelNoTrain.
 
 reset_Model_WithTrain :-
-        initModelWithTrainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 20, 1, 0.0000001, 0).
+        random_forest_initModelWithTrainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 20, 1, 0.0000001, 0).
 
 
 %%
@@ -22,129 +22,129 @@ reset_Model_WithTrain :-
 %% Successful Tests
 
 test(random_forest_InitModelNoTrain) :-
-        initModelNoTrain.
+        random_forest_initModelNoTrain.
 
 :- end_tests(initModelNoTrainI).
 
 
 
 %%
-%% TESTING predicate initModelWithTrainNoWeights/8
+%% TESTING predicate random_forest_initModelWithTrainNoWeights/8
 %%
-:- begin_tests(initModelWithTrainNoWeights).      
+:- begin_tests(random_forest_initModelWithTrainNoWeights).      
 
 %% Failure Tests
                                             
 test(random_forest_InitModelWithTrainNoWeights_Negative_NumClasses, fail) :-
-        initModelWithTrainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], -2, 20, 1, 0.0000001, 0).
+        random_forest_initModelWithTrainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], -2, 20, 1, 0.0000001, 0).
 
 
 test(random_forest_InitModelWithTrainNoWeights_Negative_NumTrees, fail) :-
-        initModelWithTrainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, -20, 1, 0.0000001, 0).
+        random_forest_initModelWithTrainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, -20, 1, 0.0000001, 0).
 
 test(random_forest_InitModelWithTrainNoWeights_Negative_MinLeafSize, fail) :-
-        initModelWithTrainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 20, -1, 0.0000001, 0).
+        random_forest_initModelWithTrainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 20, -1, 0.0000001, 0).
 
 test(random_forest_InitModelWithTrainNoWeights_Negative_MinGainSplit, fail) :-
-        initModelWithTrainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 20, 1, -0.0000001, 0).
+        random_forest_initModelWithTrainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 20, 1, -0.0000001, 0).
 
 test(random_forest_InitModelWithTrainNoWeights_Negative_MaxDepth, fail) :-
-        initModelWithTrainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 20, 1, 0.0000001, -1).
+        random_forest_initModelWithTrainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 20, 1, 0.0000001, -1).
 
 
 test(random_forest_InitModelWithTrainNoWeights_Too_Short_Label, [error(_,system_error('The number of data points does not match the number of labels!'))]) :-
-        initModelWithTrainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], 2, 20, 1, 0.0000001, 0).
+        random_forest_initModelWithTrainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], 2, 20, 1, 0.0000001, 0).
 
 test(random_forest_InitModelWithTrainNoWeights_Too_Long_Label, [error(_,system_error('The number of data points does not match the number of labels!'))]) :-
-        initModelWithTrainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1,0,1], 2, 20, 1, 0.0000001, 0).
+        random_forest_initModelWithTrainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1,0,1], 2, 20, 1, 0.0000001, 0).
 
 test(random_forest_InitModelWithTrainNoWeights_Too_Many_Label_Classes, [error(_,system_error('The given Labels dont fit the format [0,Numclasses-1]!'))]) :-
-        initModelWithTrainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,2,3], 2, 20, 1, 0.0000001, 0).
+        random_forest_initModelWithTrainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,2,3], 2, 20, 1, 0.0000001, 0).
         
 
 %% Successful Tests
 
 test(random_forest_InitModelWithTrainNoWeights_Normal_Use) :-
-        initModelWithTrainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 20, 1, 0.0000001, 0).
+        random_forest_initModelWithTrainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 20, 1, 0.0000001, 0).
 
 test(random_forest_InitModelWithTrainNoWeights_CSV_Input) :-
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        initModelWithTrainNoWeights(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 20, 1, 0.0000001, 0).
+        random_forest_initModelWithTrainNoWeights(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 20, 1, 0.0000001, 0).
 
-:- end_tests(initModelWithTrainNoWeights).
+:- end_tests(random_forest_initModelWithTrainNoWeights).
 
 
 
 %%
-%% TESTING predicate initModelWithTrainWithWeights/9
+%% TESTING predicate random_forest_initModelWithTrainWithWeights/9
 %%
-:- begin_tests(initModelWithTrainWithWeights).      
+:- begin_tests(random_forest_initModelWithTrainWithWeights).      
 
 %% Failure Tests
                                             
 test(random_forest_InitModelWithTrainWithWeights_Negative_NumClasses, fail) :-
-        initModelWithTrainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], -2, [0.5,0.1,0.7,1.2], 20, 1, 0.0000001, 0).
+        random_forest_initModelWithTrainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], -2, [0.5,0.1,0.7,1.2], 20, 1, 0.0000001, 0).
 
 test(random_forest_InitModelWithTrainWithWeights_Negative_NumTrees, fail) :-
-        initModelWithTrainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.5,0.1,0.7,1.2], -20, 1, 0.0000001, 0).
+        random_forest_initModelWithTrainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.5,0.1,0.7,1.2], -20, 1, 0.0000001, 0).
 
 test(random_forest_InitModelWithTrainWithWeights_Negative_MinLeafSize, fail) :-
-        initModelWithTrainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.5,0.1,0.7,1.2], 20, -1, 0.0000001, 0).
+        random_forest_initModelWithTrainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.5,0.1,0.7,1.2], 20, -1, 0.0000001, 0).
 
 test(random_forest_initModelWithTrainWithWeights_Negative_MinGainSplit, fail) :-
-        initModelWithTrainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.5,0.1,0.7,1.2], 20, 1, -0.0000001, 0).
+        random_forest_initModelWithTrainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.5,0.1,0.7,1.2], 20, 1, -0.0000001, 0).
 
 test(random_forest_InitModelWithTrainWithWeights_Negative_MaxDepth, fail) :-
-        initModelWithTrainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.5,0.1,0.7,1.2], 20, 1, 0.0000001, -1).
+        random_forest_initModelWithTrainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.5,0.1,0.7,1.2], 20, 1, 0.0000001, -1).
 
 
 test(random_forest_InitModelWithTrainWithWeights_Too_Short_Label, [error(_,system_error('The number of data points does not match the number of labels!'))]) :-
-        initModelWithTrainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], 2, [0.5,0.1,0.7,1.2], 20, 1, 0.0000001, 0).
+        random_forest_initModelWithTrainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], 2, [0.5,0.1,0.7,1.2], 20, 1, 0.0000001, 0).
 
 test(random_forest_InitModelWithTrainWithWeights_Too_Long_Label, [error(_,system_error('The number of data points does not match the number of labels!'))]) :-
-        initModelWithTrainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1,0,1], 2, [0.5,0.1,0.7,1.2], 20, 1, 0.0000001, 0).
+        random_forest_initModelWithTrainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1,0,1], 2, [0.5,0.1,0.7,1.2], 20, 1, 0.0000001, 0).
 
 test(random_forest_InitModelWithTrainWithWeights_Too_Many_Label_Classes, [error(_,system_error('The given Labels dont fit the format [0,Numclasses-1]!'))]) :-
-        initModelWithTrainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,2,3], 2, [0.5,0.1,0.7,1.2], 20, 1, 0.0000001, 0).
+        random_forest_initModelWithTrainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,2,3], 2, [0.5,0.1,0.7,1.2], 20, 1, 0.0000001, 0).
 
 
 test(random_forest_InitModelWithTrainWithWeights_Too_Short_Weights, [error(_,system_error('The number of data points does not match the number of weights!'))]) :-
-        initModelWithTrainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.5,0.1], 20, 1, 0.0000001, 0).
+        random_forest_initModelWithTrainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.5,0.1], 20, 1, 0.0000001, 0).
 
 test(random_forest_InitModelWithTrainWithWeights_Too_Long_Weights, [error(_,system_error('The number of data points does not match the number of weights!'))]) :-
-        initModelWithTrainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.5,0.1,0.7,1.2,0.43,2.0], 20, 1, 0.0000001, 0).
+        random_forest_initModelWithTrainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.5,0.1,0.7,1.2,0.43,2.0], 20, 1, 0.0000001, 0).
         
 
 %% Successful Tests
 
 test(random_forest_InitModelWithTrainWithWeights_Normal_Use) :-
-        initModelWithTrainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.5,0.1,0.7,1.2], 20, 1, 0.0000001, 0).
+        random_forest_initModelWithTrainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.5,0.1,0.7,1.2], 20, 1, 0.0000001, 0).
 
 test(random_forest_InitModelWithTrainWithWeights_CSV_Input) :-
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        initModelWithTrainWithWeights(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, [0.5,0.1,0.7,1.2,0.5,3.4,0.9,0.5,0.5,1.2], 20, 1, 0.0000001, 0).
+        random_forest_initModelWithTrainWithWeights(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, [0.5,0.1,0.7,1.2,0.5,3.4,0.9,0.5,0.5,1.2], 20, 1, 0.0000001, 0).
 
-:- end_tests(initModelWithTrainWithWeights).
+:- end_tests(random_forest_initModelWithTrainWithWeights).
 
 
 
 %%
-%% TESTING predicate classifyPoint/3
+%% TESTING predicate random_forest_classifyPoint/3
 %%
-:- begin_tests(classifyPoint).      
+:- begin_tests(random_forest_classifyPoint).      
 
 %% Failure Tests
                                             
 test(random_forest_ClassifyPoint_Before_Train, [error(_,system_error('RandomForest::Classify(): no random forest trained!'))]) :-
         reset_Model_NoTrain,
-        classifyPoint([5.1,3.5,1.4], _, _).
+        random_forest_classifyPoint([5.1,3.5,1.4], _, _).
 
 %% Point dim seems to not matter
 test(random_forest_ClassifyPoint_Smaller_Dim_To_Train) :-
         reset_Model_WithTrain,
-        classifyPoint([5.1,3.5], Prediction, ProbabilitiesList),
+        random_forest_classifyPoint([5.1,3.5], Prediction, ProbabilitiesList),
         print('\nPrediction: '),
         print(Prediction),
         print('\nProbabilities: '),
@@ -153,7 +153,7 @@ test(random_forest_ClassifyPoint_Smaller_Dim_To_Train) :-
 %% Point dim seems to not matter
 test(random_forest_ClassifyPoint_Larger_Dim_To_Train) :-
         reset_Model_WithTrain,
-        classifyPoint([5.1,3.5,1.4,4.3,0.4], Prediction, ProbabilitiesList),
+        random_forest_classifyPoint([5.1,3.5,1.4,4.3,0.4], Prediction, ProbabilitiesList),
         print('\nPrediction: '),
         print(Prediction),
         print('\nProbabilities: '),
@@ -164,31 +164,31 @@ test(random_forest_ClassifyPoint_Larger_Dim_To_Train) :-
 
 test(random_forest_ClassifyPoint_Normal_Use) :-
         reset_Model_WithTrain,
-        classifyPoint([5.1,3.5,1.4], Prediction, ProbabilitiesList),
+        random_forest_classifyPoint([5.1,3.5,1.4], Prediction, ProbabilitiesList),
         print('\nPrediction: '),
         print(Prediction),
         print('\nProbabilities: '),
         print(ProbabilitiesList).
 
-:- end_tests(classifyPoint).
+:- end_tests(random_forest_classifyPoint).
 
 
 
 %%
-%% TESTING predicate classifyMatrix/5
+%% TESTING predicate random_forest_classifyMatrix/5
 %%
-:- begin_tests(classifyMatrix).      
+:- begin_tests(random_forest_classifyMatrix).      
 
 %% Failure Tests
                                             
 test(random_forest_ClassifyMatrix_Before_Train, [error(_,system_error('RandomForest::Classify(): no random forest trained!'))]) :-
         reset_Model_NoTrain,
-        classifyMatrix([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, _, _, _).
+        random_forest_classifyMatrix([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, _, _, _).
 
 %% Classify dim seems to not matter
 test(random_forest_ClassifyMatrix_Smaller_Dim_To_Train) :-
         reset_Model_WithTrain,
-        classifyMatrix([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, Prediction, ProbabilitiesList, _),
+        random_forest_classifyMatrix([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, Prediction, ProbabilitiesList, _),
         print('\nPrediction: '),
         print(Prediction),
         print('\nProbabilities: '),
@@ -197,7 +197,7 @@ test(random_forest_ClassifyMatrix_Smaller_Dim_To_Train) :-
 %% Classify dim seems to not matter
 test(random_forest_ClassifyMatrix_Larger_Dim_To_Train) :-
         reset_Model_WithTrain,
-        classifyMatrix([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 2, Prediction, ProbabilitiesList, _),
+        random_forest_classifyMatrix([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 2, Prediction, ProbabilitiesList, _),
         print('\nPrediction: '),
         print(Prediction),
         print('\nProbabilities: '),
@@ -208,27 +208,27 @@ test(random_forest_ClassifyMatrix_Larger_Dim_To_Train) :-
 
 test(random_forest_ClassifyMatrix_Normal_Use) :-
         reset_Model_WithTrain,
-        classifyMatrix([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, Prediction, ProbabilitiesList, _),
+        random_forest_classifyMatrix([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, Prediction, ProbabilitiesList, _),
         print('\nPrediction: '),
         print(Prediction),
         print('\nProbabilities: '),
         print(ProbabilitiesList).
 
-:- end_tests(classifyMatrix).
+:- end_tests(random_forest_classifyMatrix).
 
 
 
 %%
-%% TESTING predicate numTrees/1
+%% TESTING predicate random_forest_numTrees/1
 %%
-:- begin_tests(numTrees).      
+:- begin_tests(random_forest_numTrees).      
 
 %% Failure Tests
                        
 %% doesnt cause an error                     
 test(random_forest_NumTrees_Before_Train) :-
         reset_Model_NoTrain,
-        numTrees(NumTrees),
+        random_forest_numTrees(NumTrees),
         print('\nNumber of Trees: '),
         print(NumTrees).
         
@@ -237,60 +237,60 @@ test(random_forest_NumTrees_Before_Train) :-
 
 test(random_forest_NumTrees_Normal_Use) :-
         reset_Model_WithTrain,
-        numTrees(NumTrees),
+        random_forest_numTrees(NumTrees),
         print('\nNumber of Trees: '),
         print(NumTrees).
 
-:- end_tests(numTrees).
+:- end_tests(random_forest_numTrees).
 
 
 
 %%
-%% TESTING predicate trainNoWeights/9
+%% TESTING predicate random_forest_trainNoWeights/9
 %%
-:- begin_tests(trainNoWeights).      
+:- begin_tests(random_forest_trainNoWeights).      
                          
 %% Failure Tests
                                             
 test(random_forest_TrainNoWeights_Negative_NumClasses, fail) :-
         reset_Model_NoTrain,
-        trainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], -2, 20, 1, 0.0000001, 0, _).
+        random_forest_trainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], -2, 20, 1, 0.0000001, 0, _).
 
 test(random_forest_TrainNoWeights_Negative_NumTrees, fail) :-
         reset_Model_NoTrain,
-        trainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, -20, 1, 0.0000001, 0, _).
+        random_forest_trainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, -20, 1, 0.0000001, 0, _).
 
 test(random_forest_TrainNoWeights_Negative_MinLeafSize, fail) :-
         reset_Model_NoTrain,
-        trainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 20, -1, 0.0000001, 0, _).
+        random_forest_trainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 20, -1, 0.0000001, 0, _).
 
 test(random_forest_TrainNoWeights_Negative_MinGainSplit, fail) :-
         reset_Model_NoTrain,
-        trainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 20, 1, -0.0000001, 0, _).
+        random_forest_trainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 20, 1, -0.0000001, 0, _).
 
 test(random_forest_TrainNoWeights_Negative_MaxDepth, fail) :-
         reset_Model_NoTrain,
-        trainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 20, 1, 0.0000001, -1, _).
+        random_forest_trainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 20, 1, 0.0000001, -1, _).
 
 
 test(random_forest_TrainNoWeights_Too_Short_Label, [error(_,system_error('The number of data points does not match the number of labels!'))]) :-
         reset_Model_NoTrain,
-        trainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], 2, 20, 1, 0.0000001, 0, _).
+        random_forest_trainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], 2, 20, 1, 0.0000001, 0, _).
 
 test(random_forest_TrainNoWeights_Too_Long_Label, [error(_,system_error('The number of data points does not match the number of labels!'))]) :-
         reset_Model_NoTrain,
-        trainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1,0,1], 2, 20, 1, 0.0000001, 0, _).
+        random_forest_trainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1,0,1], 2, 20, 1, 0.0000001, 0, _).
 
 test(random_forest_TrainNoWeights_Too_Many_Label_Classes, [error(_,system_error('The given Labels dont fit the format [0,Numclasses-1]!'))]) :-
         reset_Model_NoTrain,
-        trainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,2,3], 2, 20, 1, 0.0000001, 0, _).
+        random_forest_trainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,2,3], 2, 20, 1, 0.0000001, 0, _).
         
 
 %% Successful Tests
 
 test(random_forest_TrainNoWeights_Normal_Use) :-
         reset_Model_NoTrain,
-        trainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 20, 1, 0.0000001, 0, Entropy),
+        random_forest_trainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 20, 1, 0.0000001, 0, Entropy),
         print('\nEntropy: '),
         print(Entropy).
 
@@ -298,69 +298,69 @@ test(random_forest_TrainNoWeights_CSV_Input) :-
         reset_Model_NoTrain,
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        trainNoWeights(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 20, 1, 0.0000001, 0, Entropy),
+        random_forest_trainNoWeights(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 20, 1, 0.0000001, 0, Entropy),
         print('\nEntropy: '),
         print(Entropy).
 
-:- end_tests(trainNoWeights).
+:- end_tests(random_forest_trainNoWeights).
 
 
 
 %%
-%% TESTING predicate trainWithWeights/10
+%% TESTING predicate random_forest_trainWithWeights/10
 %%
-:- begin_tests(trainWithWeights).      
+:- begin_tests(random_forest_trainWithWeights).      
 
 %% Failure Tests
                                             
 test(random_forest_TrainWithWeights_Negative_NumClasses, fail) :-
         reset_Model_NoTrain,
-        trainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], -2, [0.5,0.1,0.7,1.2], 20, 1, 0.0000001, 0, _).
+        random_forest_trainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], -2, [0.5,0.1,0.7,1.2], 20, 1, 0.0000001, 0, _).
 
 test(random_forest_TrainWithWeights_Negative_NumTrees, fail) :-
         reset_Model_NoTrain,
-        trainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.5,0.1,0.7,1.2], -20, 1, 0.0000001, 0, _).
+        random_forest_trainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.5,0.1,0.7,1.2], -20, 1, 0.0000001, 0, _).
 
 test(random_forest_TrainWithWeights_Negative_MinLeafSize, fail) :-
         reset_Model_NoTrain,
-        trainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.5,0.1,0.7,1.2], 20, -1, 0.0000001, 0, _).
+        random_forest_trainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.5,0.1,0.7,1.2], 20, -1, 0.0000001, 0, _).
 
 test(random_forest_TrainWithWeights_Negative_MinGainSplit, fail) :-
         reset_Model_NoTrain,
-        trainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.5,0.1,0.7,1.2], 20, 1, -0.0000001, 0, _).
+        random_forest_trainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.5,0.1,0.7,1.2], 20, 1, -0.0000001, 0, _).
 
 test(random_forest_TrainWithWeights_Negative_MaxDepth, fail) :-
         reset_Model_NoTrain,
-        trainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.5,0.1,0.7,1.2], 20, 1, 0.0000001, -1, _).
+        random_forest_trainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.5,0.1,0.7,1.2], 20, 1, 0.0000001, -1, _).
 
 
 test(random_forest_TrainWithWeights_Too_Short_Label, [error(_,system_error('The number of data points does not match the number of labels!'))]) :-
         reset_Model_NoTrain,
-        trainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], 2, [0.5,0.1,0.7,1.2], 20, 1, 0.0000001, 0, _).
+        random_forest_trainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], 2, [0.5,0.1,0.7,1.2], 20, 1, 0.0000001, 0, _).
 
 test(random_forest_TrainWithWeights_Too_Long_Label, [error(_,system_error('The number of data points does not match the number of labels!'))]) :-
         reset_Model_NoTrain,
-        trainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1,0,1], 2, [0.5,0.1,0.7,1.2], 20, 1, 0.0000001, 0, _).
+        random_forest_trainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1,0,1], 2, [0.5,0.1,0.7,1.2], 20, 1, 0.0000001, 0, _).
 
 test(random_forest_TrainWithWeights_Too_Many_Label_Classes, [error(_,system_error('The given Labels dont fit the format [0,Numclasses-1]!'))]) :-
         reset_Model_NoTrain,
-        trainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,2,3], 2, [0.5,0.1,0.7,1.2], 20, 1, 0.0000001, 0, _).
+        random_forest_trainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,2,3], 2, [0.5,0.1,0.7,1.2], 20, 1, 0.0000001, 0, _).
 
 
 test(random_forest_TrainWithWeights_Too_Short_Weights, [error(_,system_error('The number of data points does not match the number of weights!'))]) :-
         reset_Model_NoTrain,
-        trainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.5,0.1], 20, 1, 0.0000001, 0, _).
+        random_forest_trainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.5,0.1], 20, 1, 0.0000001, 0, _).
 
 test(random_forest_TrainWithWeights_Too_Long_Weights, [error(_,system_error('The number of data points does not match the number of weights!'))]) :-
         reset_Model_NoTrain,
-        trainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.5,0.1,0.7,1.2,0.43,2.0], 20, 1, 0.0000001, 0, _).
+        random_forest_trainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.5,0.1,0.7,1.2,0.43,2.0], 20, 1, 0.0000001, 0, _).
         
 
 %% Successful Tests
 
 test(random_forest_TrainWithWeights_Normal_Use) :-
         reset_Model_NoTrain,
-        trainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.5,0.1,0.7,1.2], 20, 1, 0.0000001, 0, Entropy),
+        random_forest_trainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.5,0.1,0.7,1.2], 20, 1, 0.0000001, 0, Entropy),
         print('\nEntropy: '),
         print(Entropy).
 
@@ -368,11 +368,11 @@ test(random_forest_TrainWithWeights_CSV_Input) :-
         reset_Model_NoTrain,
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        trainWithWeights(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, [0.5,0.1,0.7,1.2,0.5,3.4,0.9,0.5,0.5,1.2], 20, 1, 0.0000001, 0, Entropy),
+        random_forest_trainWithWeights(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, [0.5,0.1,0.7,1.2,0.5,3.4,0.9,0.5,0.5,1.2], 20, 1, 0.0000001, 0, Entropy),
         print('\nEntropy: '),
         print(Entropy).
 
-:- end_tests(trainWithWeights).
+:- end_tests(random_forest_trainWithWeights).
 
 run_random_forest_tests :-
         run_tests.
diff --git a/src/methods/softmax_regression/softmax_regression.pl b/src/methods/softmax_regression/softmax_regression.pl
index 17e11624c49d642357995a3bf043b9c63483d944..2bfb6db902de0a43108a461a7bac9d043a66ad34 100644
--- a/src/methods/softmax_regression/softmax_regression.pl
+++ b/src/methods/softmax_regression/softmax_regression.pl
@@ -1,12 +1,12 @@
 
-:- module(softmax_regression, [ initModelNoTrain/3,
-                                initModelWithTrain/6,
-                                classifyPoint/2,
-                                classifyMatrix/5,
-                                computeAccuracy/4,
-                                featureSize/1,
-                                parameters/2,
-                                train/5]).
+:- module(softmax_regression, [ softmax_regression_initModelNoTrain/3,
+                                softmax_regression_initModelWithTrain/6,
+                                softmax_regression_classifyPoint/2,
+                                softmax_regression_classifyMatrix/5,
+                                softmax_regression_computeAccuracy/4,
+                                softmax_regression_featureSize/1,
+                                softmax_regression_parameters/2,
+                                softmax_regression_train/5]).
 
 %% requirements of library(struct)
 :- load_files(library(str_decl),
@@ -35,7 +35,7 @@
 %%              Initializes the softmax_regression model without training.
 %%              Be sure to use Train before calling Classif or ComputeAccuracy, otherwise the results may be meaningless.
 %%
-initModelNoTrain(InputSize, NumClasses, FitIntercept) :-
+softmax_regression_initModelNoTrain(InputSize, NumClasses, FitIntercept) :-
         InputSize > 0,
         NumClasses >= 0,
         initModelNoTrainI(InputSize, NumClasses, FitIntercept).
@@ -56,7 +56,7 @@ foreign(initModelNoTrain, c, initModelNoTrainI( +integer, +integer,
 %% --Description--
 %%              Initializes the softmax_regression model and trains it.
 %%
-initModelWithTrain(DataList, DataRows, LabelsList, NumClasses, Lambda, FitIntercept) :-
+softmax_regression_initModelWithTrain(DataList, DataRows, LabelsList, NumClasses, Lambda, FitIntercept) :-
         NumClasses >= 0,
         Lambda >= 0,
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)),
@@ -78,7 +78,7 @@ foreign(initModelWithTrain, c, initModelWithTrainI(     +pointer(float_array), +
 %% --Description--
 %%              Classify the given point.
 %%
-classifyPoint(DataList, Prediction) :-
+softmax_regression_classifyPoint(DataList, Prediction) :-
         convert_list_to_float_array(DataList, array(Xsize, X)),
         classifyPointI(X, Xsize, Prediction).
 
@@ -96,7 +96,7 @@ foreign(classifyPoint, c, classifyPointI(+pointer(float_array), +integer,
 %% --Description--
 %%              Classify the given points, returning class probabilities and predicted class label for each point.
 %%
-classifyMatrix(DataList, DataRows, PredictionList, ProbsList, ZCols) :-
+softmax_regression_classifyMatrix(DataList, DataRows, PredictionList, ProbsList, ZCols) :-
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrows, X)),
         classifyMatrixI(X, Xsize, Xrows, Y, Ysize, Z, ZCols, ZRows),
         convert_float_array_to_list(Y, Ysize, PredictionList),
@@ -118,7 +118,7 @@ foreign(classifyMatrix, c, classifyMatrixI(     +pointer(float_array), +integer,
 %%              Computes accuracy of the learned model given the feature data and the labels associated with each data point.
 %%              Predictions are made using the provided data and are compared with the actual labels.
 %%
-computeAccuracy(DataList, DataRows, LabelsList, Accuracy) :-
+softmax_regression_computeAccuracy(DataList, DataRows, LabelsList, Accuracy) :-
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)),
         convert_list_to_float_array(LabelsList, array(Ysize, Y)),
         computeAccuracyI(X, Xsize, Xrownum, Y, Ysize, Accuracy).
@@ -135,7 +135,7 @@ foreign(computeAccuracy, c, computeAccuracyI(   +pointer(float_array), +integer,
 %% --Description--
 %%              Gets the features size of the training data.
 %%
-featureSize(FeatureSize) :-
+softmax_regression_featureSize(FeatureSize) :-
         featureSizeI(FeatureSize).
 
 foreign(featureSize, c, featureSizeI([-integer])).
@@ -148,7 +148,7 @@ foreign(featureSize, c, featureSizeI([-integer])).
 %% --Description--
 %%              Get the model parameters.
 %%
-parameters(PrametersList, XCols) :-
+softmax_regression_parameters(PrametersList, XCols) :-
         parametersI(X, XCols, XRows),
         convert_float_array_to_2d_list(X, XCols, XRows, PrametersList).
 
@@ -166,7 +166,7 @@ foreign(parameters, c, parametersI(-pointer(float_array), -integer, -integer)).
 %% --Description--
 %%              Trains the softmax regression model with the given training data.
 %%
-train(DataList, DataRows, LabelsList, NumClasses, FinalValue) :-
+softmax_regression_train(DataList, DataRows, LabelsList, NumClasses, FinalValue) :-
         NumClasses >= 0,
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)),
         convert_list_to_float_array(LabelsList, array(Ysize, Y)),
diff --git a/src/methods/softmax_regression/softmax_regression_test.pl b/src/methods/softmax_regression/softmax_regression_test.pl
index 6612ce1f49f7151a2bf2ba6af199bb31f27ee5e9..99dd60b36c20b2cf2a6bff6779112143fc378ce9 100644
--- a/src/methods/softmax_regression/softmax_regression_test.pl
+++ b/src/methods/softmax_regression/softmax_regression_test.pl
@@ -7,137 +7,137 @@
 :- use_module('../../helper_files/helper.pl').
 
 reset_Model_NoTrain :-
-        initModelNoTrain(3, 2, 0).
+        softmax_regression_initModelNoTrain(3, 2, 0).
 
 reset_Model_WithTrain :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0.0001, 0).
+        softmax_regression_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0.0001, 0).
 
 %%
-%% TESTING predicate initModelNoTrain/3
+%% TESTING predicate softmax_regression_initModelNoTrain/3
 %%
-:- begin_tests(initModelNoTrain).      
+:- begin_tests(softmax_regression_initModelNoTrain).      
 
 %% Failure Tests
                                             
 test(softmax_InitNoTrain_Negative_InputSize, fail) :-
-        initModelNoTrain(-1, 0, 0).
+        softmax_regression_initModelNoTrain(-1, 0, 0).
 
 test(softmax_InitNoTrain_Negative_InputSize, fail) :-
-        initModelNoTrain(3, -1, 0).
+        softmax_regression_initModelNoTrain(3, -1, 0).
         
 
 %% Successful Tests
 
 test(softmax_InitNoTrain_FitIntercept_False) :-
-        initModelNoTrain(3, 2, 0).
+        softmax_regression_initModelNoTrain(3, 2, 0).
 
 test(softmax_InitNoTrain_FitIntercept_True) :-
-        initModelNoTrain(2, 3, 1).
+        softmax_regression_initModelNoTrain(2, 3, 1).
 
-:- end_tests(initModelNoTrain).
+:- end_tests(softmax_regression_initModelNoTrain).
 
 
 %%
-%% TESTING predicate initModelWithTrain/6
+%% TESTING predicate softmax_regression_initModelWithTrain/6
 %%
-:- begin_tests(initModelWithTrain).      
+:- begin_tests(softmax_regression_initModelWithTrain).      
 
 %% Failure Tests
                                             
 
 test(softmax_InitWithTrain_Negative_NumClass, fail) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], -1, 0.0001, 0).
+        softmax_regression_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], -1, 0.0001, 0).
         
 test(softmax_InitWithTrain_Negative_Lambda, fail) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, -0.0001, 0).
+        softmax_regression_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, -0.0001, 0).
 
 test(softmax_InitWithTrain_Wrong_Label_Dims1, [error(_,system_error('element-wise multiplication: incompatible matrix dimensions: 2x4 and 2x2'))]) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], 2, 0.0001, 0).
+        softmax_regression_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], 2, 0.0001, 0).
 
 
 test(softmax_InitWithTrain_Wrong_Label_Dims2, [error(_,system_error('element-wise multiplication: incompatible matrix dimensions: 2x4 and 2x7'))]) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1,0,0,1], 2, 0.0001, 0).
+        softmax_regression_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1,0,0,1], 2, 0.0001, 0).
 
 %% Doesnt cause exception
 test(softmax_InitWithTrain_Wrong_Label_Value) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,-1,0,-1], 2, 0.0001, 0).
+        softmax_regression_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,-1,0,-1], 2, 0.0001, 0).
 
 %% Doesnt cause exception
 test(softmax_InitWithTrain_Too_Many_Label_Value) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [1,1,0,2], 2, 0.0001, 0).
+        softmax_regression_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [1,1,0,2], 2, 0.0001, 0).
         
 
 %% Successful Tests
 
 test(softmax_InitWithTrain_Direct_Input) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0.0001, 0).
+        softmax_regression_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 0.0001, 0).
 
 test(softmax_InitWithTrain_CSV_Input) :-
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        initModelWithTrain(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0.003, 1).
+        softmax_regression_initModelWithTrain(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0.003, 1).
 
-:- end_tests(initModelWithTrain).
+:- end_tests(softmax_regression_initModelWithTrain).
 
 
 %%
-%% TESTING predicate classifyPoint/2
+%% TESTING predicate softmax_regression_classifyPoint/2
 %%
-:- begin_tests(classifyPoint).      
+:- begin_tests(softmax_regression_classifyPoint).      
 
 %% Failure Tests
            
 %% Doesnt cause an exception                                 
 test(softmax_ClassifyPoint_On_Untrained_Model) :-
         reset_Model_NoTrain,
-        classifyPoint([5.1,3.5,1.4], _).
+        softmax_regression_classifyPoint([5.1,3.5,1.4], _).
 
 test(softmax_ClassifyPoint_With_Too_Big_Dims, [error(_,system_error('SoftmaxRegression::Classify(): dataset has 5 dimensions, but model has 3 dimensions!'))]) :-
         reset_Model_WithTrain,
-        classifyPoint([5.1,3.5,1.4,5.2,3.2], _).
+        softmax_regression_classifyPoint([5.1,3.5,1.4,5.2,3.2], _).
 
 test(softmax_ClassifyPoint_With_Too_Small_Dims, [error(_,system_error('SoftmaxRegression::Classify(): dataset has 2 dimensions, but model has 3 dimensions!'))]) :-
         reset_Model_WithTrain,
-        classifyPoint([5.1,3.5], _).
+        softmax_regression_classifyPoint([5.1,3.5], _).
         
 
 %% Successful Tests
 
 test(softmax_ClassifyPoint) :-
         reset_Model_WithTrain,
-        classifyPoint([4.1,2.5,1.4], Prediction),
+        softmax_regression_classifyPoint([4.1,2.5,1.4], Prediction),
         print('\nPrediction: '),
         print(Prediction).
 
-:- end_tests(classifyPoint).
+:- end_tests(softmax_regression_classifyPoint).
 
 
 %%
-%% TESTING predicate classifyMatrix/5
+%% TESTING predicate softmax_regression_classifyMatrix/5
 %%
-:- begin_tests(classifyMatrix).      
+:- begin_tests(softmax_regression_classifyMatrix).      
 
 %% Failure Tests
                         
 %% Doesnt cause an exception                    
 test(softmax_ClassifyMatrix_On_Untrained_Model) :-
         reset_Model_NoTrain,
-        classifyMatrix([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5], 3, _, _, _).
+        softmax_regression_classifyMatrix([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5], 3, _, _, _).
 
 test(softmax_ClassifyMatrix_With_Too_Big_Dims, [error(_,system_error('SoftmaxRegression::Classify(): dataset has 5 dimensions, but model has 3 dimensions!'))]) :-
         reset_Model_WithTrain,
-        classifyMatrix([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5], 5, _, _, _).
+        softmax_regression_classifyMatrix([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5], 5, _, _, _).
 
 test(softmax_ClassifyMatrix_With_Too_Small_Dims, [error(_,system_error('SoftmaxRegression::Classify(): dataset has 2 dimensions, but model has 3 dimensions!'))]) :-
         reset_Model_WithTrain,
-        classifyMatrix([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5], 2, _, _, _).
+        softmax_regression_classifyMatrix([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5], 2, _, _, _).
         
 
 %% Successful Tests
 
 test(softmax_ClassifyMatrix_Direct_Input) :-
         reset_Model_WithTrain,
-        classifyMatrix([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5], 3, PredicList, ProbsList, _),
+        softmax_regression_classifyMatrix([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5], 3, PredicList, ProbsList, _),
         print('\nPredicted Labels: '),
         print(PredicList),
         print('\nProbabilities: '),
@@ -146,50 +146,50 @@ test(softmax_ClassifyMatrix_Direct_Input) :-
 test(softmax_ClassifyMatrix_CSV_Input) :-
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        initModelWithTrain(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0.003, 1),
-        classifyMatrix(Data, 4, PredicList, ProbsList, _),
+        softmax_regression_initModelWithTrain(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0.003, 1),
+        softmax_regression_classifyMatrix(Data, 4, PredicList, ProbsList, _),
         print('\nPredicted Labels: '),
         print(PredicList),
         print('\nProbabilities: '),
         print(ProbsList).
 
-:- end_tests(classifyMatrix).
+:- end_tests(softmax_regression_classifyMatrix).
 
 
 %%
-%% TESTING predicate computeAccuracy/4
+%% TESTING predicate softmax_regression_computeAccuracy/4
 %%
-:- begin_tests(computeAccuracy).      
+:- begin_tests(softmax_regression_computeAccuracy).      
 
 %% Failure Tests
 
 %% Doesnt cause an exception
 test(softmax_ComputeAccuracy_On_Untrained_Model) :-
         reset_Model_NoTrain,
-        computeAccuracy([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], _).
+        softmax_regression_computeAccuracy([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], _).
 
 test(softmax_ComputeAccuracy_Wrong_Label_Dims1, [error(_,system_error('The Labels Vector has the wrong Dimension!'))]) :-
         reset_Model_WithTrain,
-        computeAccuracy([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], _).
+        softmax_regression_computeAccuracy([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], _).
 
 %% Doesnt cause exception
 test(softmax_ComputeAccuracy_Wrong_Label_Dims2) :-
         reset_Model_WithTrain,
-        computeAccuracy([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1,0,0,1], _).
+        softmax_regression_computeAccuracy([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1,0,0,1], _).
 
 %% The same when the label values are out of range
 test(softmax_ComputeAccuracy_Wrong_Label_Value) :-
         reset_Model_WithTrain,
-        computeAccuracy([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,-1,0,-1], _).
+        softmax_regression_computeAccuracy([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,-1,0,-1], _).
 
 %% Doesnt cause an exception
 test(softmax_ComputeAccuracy_Too_Many_Label_Value) :-
         reset_Model_WithTrain,
-        computeAccuracy([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [1,1,0,2], _).
+        softmax_regression_computeAccuracy([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [1,1,0,2], _).
 
 test(softmax_ComputeAccuracy_Wrong_Data_Dims, [error(_,system_error('SoftmaxRegression::Classify(): dataset has 4 dimensions, but model has 3 dimensions!'))]) :-
         reset_Model_WithTrain,
-        computeAccuracy([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, [0,1,0], Accuracy),
+        softmax_regression_computeAccuracy([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, [0,1,0], Accuracy),
         print('\nAccuracy: '),
         print(Accuracy).
 
@@ -198,25 +198,25 @@ test(softmax_ComputeAccuracy_Wrong_Data_Dims, [error(_,system_error('SoftmaxRegr
 
 test(softmax_ComputeAccuracy_Direct_Input) :-
         reset_Model_WithTrain,
-        computeAccuracy([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], Accuracy),
+        softmax_regression_computeAccuracy([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], Accuracy),
         print('\nAccuracy: '),
         print(Accuracy).
 
 test(softmax_ComputeAccuracy_CSV_Input) :-
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        initModelWithTrain(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0.003, 1),
-        computeAccuracy([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2], 4, [0,1,0,1], Accuracy),
+        softmax_regression_initModelWithTrain(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0.003, 1),
+        softmax_regression_computeAccuracy([3, 2, 0, 5, 1, 4, 1, 0, 4, 3, 3, 5, 0, 5, 5, 2], 4, [0,1,0,1], Accuracy),
         print('\nAccuracy: '),
         print(Accuracy).
 
-:- end_tests(computeAccuracy).
+:- end_tests(softmax_regression_computeAccuracy).
 
 
 %%
-%% TESTING predicate featureSize/1
+%% TESTING predicate softmax_regression_featureSize/1
 %%
-:- begin_tests(featureSize).      
+:- begin_tests(softmax_regression_featureSize).      
 
 %% Failure Tests
                                             
@@ -227,31 +227,31 @@ test(softmax_ComputeAccuracy_CSV_Input) :-
 
 test(softmax_FeatureSize_No_Train) :-
         reset_Model_NoTrain,
-        featureSize(FeatureSize),
+        softmax_regression_featureSize(FeatureSize),
         print('\nFeatureSize: '),
         print(FeatureSize).
 
 test(softmax_FeatureSize_Direct_Input) :-
         reset_Model_WithTrain,
-        featureSize(FeatureSize),
+        softmax_regression_featureSize(FeatureSize),
         print('\nFeatureSize: '),
         print(FeatureSize).
 
 test(softmax_FeatureSize_CSV_Input) :-
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        initModelWithTrain(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0.003, 1),
-        featureSize(FeatureSize),
+        softmax_regression_initModelWithTrain(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0.003, 1),
+        softmax_regression_featureSize(FeatureSize),
         print('\nFeatureSize: '),
         print(FeatureSize).
 
-:- end_tests(featureSize).
+:- end_tests(softmax_regression_featureSize).
 
 
 %%
-%% TESTING predicate parameters/2
+%% TESTING predicate softmax_regression_parameters/2
 %%
-:- begin_tests(parameters).      
+:- begin_tests(softmax_regression_parameters).      
 
 %% Failure Tests
         
@@ -260,80 +260,80 @@ test(softmax_FeatureSize_CSV_Input) :-
 
 test(softmax_Parameters_No_Train) :-
         reset_Model_NoTrain,
-        parameters(PrametersList, _),
+        softmax_regression_parameters(PrametersList, _),
         print('\nParameters: '),
         print(PrametersList).
 
 test(softmax_Parameters_Direct_Input) :-
         reset_Model_WithTrain,
-        parameters(PrametersList, _),
+        softmax_regression_parameters(PrametersList, _),
         print('\nParameters: '),
         print(PrametersList).
 
 test(softmax_Parameters_CSV_Input) :-
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        initModelWithTrain(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0.003, 1),
-        parameters(PrametersList, _),
+        softmax_regression_initModelWithTrain(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 0.003, 1),
+        softmax_regression_parameters(PrametersList, _),
         print('\nParameters: '),
         print(PrametersList).
 
-:- end_tests(parameters).
+:- end_tests(softmax_regression_parameters).
 
 
 %%
-%% TESTING predicate train/5
+%% TESTING predicate softmax_regression_train/5
 %%
-:- begin_tests(train).      
+:- begin_tests(softmax_regression_train).      
 
 %% Failure Tests
                                             
 test(softmax_Train_Negative_NumClass, fail) :-
         reset_Model_NoTrain,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], -1, _).
+        softmax_regression_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], -1, _).
 
 test(softmax_Train_Wrong_Label_Dims1, [error(_,system_error('element-wise multiplication: incompatible matrix dimensions: 2x4 and 2x2'))]) :-
         reset_Model_NoTrain,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], 2, _).
+        softmax_regression_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], 2, _).
 
 %% If the label vector is to long it seems to cause no problems
 test(softmax_Train_Wrong_Label_Dims2, [error(_,system_error('element-wise multiplication: incompatible matrix dimensions: 2x4 and 2x7'))]) :-
         reset_Model_NoTrain,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1,0,0,1], 2, _).
+        softmax_regression_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1,0,0,1], 2, _).
 
 %% The same when the label values are out of range
 test(softmax_Train_Wrong_Label_Value) :-
         reset_Model_NoTrain,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,-1,0,-1], 2, _).
+        softmax_regression_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,-1,0,-1], 2, _).
 
 %% doesnt cause a exeption
 test(softmax_Train_Too_Many_Label_Value) :-
         reset_Model_NoTrain,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [1,1,0,2], 2, _).
+        softmax_regression_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [1,1,0,2], 2, _).
 
 %% doesnt cause a exeption
 test(softmax_Train_Wrong_Data_Dims) :-
         reset_Model_NoTrain,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, [0,1,0], 2, _).
+        softmax_regression_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, [0,1,0], 2, _).
    
 
 %% Successful Tests
 
 test(softmax_Train_Direct_Input) :-
         reset_Model_NoTrain,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, FinalValue),
+        softmax_regression_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, FinalValue),
         print('\nFinalValue: '),
         print(FinalValue).
 
 test(softmax_Train_CSV_Input) :-
-        initModelNoTrain(4, 2, 0),
+        softmax_regression_initModelNoTrain(4, 2, 0),
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        train(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, FinalValue),
+        softmax_regression_train(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, FinalValue),
         print('\nFinalValue: '),
         print(FinalValue).
 
-:- end_tests(train).
+:- end_tests(softmax_regression_train).
 
 
 run_softmax_regression_tests :-
diff --git a/src/methods/sparse_coding/sparse_coding.pl b/src/methods/sparse_coding/sparse_coding.pl
index 5b9c4f0fa99c136564190593325f008093c81aee..02aea9d87e69e54a78e2a7f01d7be3f5363d89b7 100644
--- a/src/methods/sparse_coding/sparse_coding.pl
+++ b/src/methods/sparse_coding/sparse_coding.pl
@@ -1,8 +1,8 @@
 
-:- module(sparse_coding, [      initModelWithTrain/8,
-                                initModelNoTrain/6,
-                                encode/4,
-                                train/3]).
+:- module(sparse_coding, [      sparse_coding_initModelWithTrain/8,
+                                sparse_coding_initModelNoTrain/6,
+                                sparse_coding_encode/4,
+                                sparse_coding_train/3]).
 
 %% requirements of library(struct)
 :- load_files(library(str_decl),
@@ -17,7 +17,7 @@
         float32          = float_32,
         float_array      = array(float32).
 
-%% definitions for the connected function
+%% definitions for the connected function
 
 
 
@@ -36,7 +36,7 @@
 %% --Description--
 %%              Initializes sparse_coding model and trains it.
 %%
-initModelWithTrain(MatList, MatRows, Atoms, Lambda1, Lambda2, MaxIterations, ObjTolerance, NewtonTolerance) :-
+sparse_coding_initModelWithTrain(MatList, MatRows, Atoms, Lambda1, Lambda2, MaxIterations, ObjTolerance, NewtonTolerance) :-
         Atoms >= 0,
         Lambda1 > 0,
         Lambda2 >= 0,
@@ -64,7 +64,7 @@ foreign(initModelWithTrain, c, initModelWithTrainI(      +pointer(float_array),
 %% --Description--
 %%              Initializes sparse_coding model but will not train the model, and a subsequent call to Train will be required before the model can encode points with Encode.
 %%
-initModelNoTrain(Atoms, Lambda1, Lambda2, MaxIterations, ObjTolerance, NewtonTolerance) :-
+sparse_coding_initModelNoTrain(Atoms, Lambda1, Lambda2, MaxIterations, ObjTolerance, NewtonTolerance) :-
         Atoms >= 0,
         Lambda1 > 0,
         Lambda2 >= 0,
@@ -86,7 +86,7 @@ foreign(initModelNoTrain, c, initModelNoTrainI(+integer, +float32, +float32, +in
 %% --Description--
 %%              Sparse code each point in the given dataset via LARS, using the current dictionary and store the encoded data in the codes matrix.
 %%
-encode(DataList, DataRows, CodesList, YCols) :-
+sparse_coding_encode(DataList, DataRows, CodesList, YCols) :-
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)),
         encodeI(X, Xsize, Xrownum, Y, YCols, YRows),
         convert_float_array_to_2d_list(Y, YCols, YRows, CodesList).
@@ -105,7 +105,7 @@ foreign(encode, c, encodeI(      +pointer(float_array), +integer, +integer,
 %% --Description--
 %%              Train the sparse coding model on the given dataset.
 %%
-train(DataList, DataRows, ReturnValue) :-
+sparse_coding_train(DataList, DataRows, ReturnValue) :-
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)),
         trainI(X, Xsize, Xrownum, ReturnValue).
 
diff --git a/src/methods/sparse_coding/sparse_coding_test.pl b/src/methods/sparse_coding/sparse_coding_test.pl
index 6290c0a4f210b89691b02b40c155875482b0e2d1..76b1149264671aa3ef7bc75710e6c94c32567f34 100644
--- a/src/methods/sparse_coding/sparse_coding_test.pl
+++ b/src/methods/sparse_coding/sparse_coding_test.pl
@@ -10,107 +10,107 @@
 reset_Model_With_Train_A :-
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,20, Data),
-        initModelWithTrain(Data,4,15,0.1,0.0,100,0.01,0.000001).
+        sparse_coding_initModelWithTrain(Data,4,15,0.1,0.0,100,0.01,0.000001).
 reset_Model_No_Train_A :-
-        initModelNoTrain(2,0.1,0.0,100,0.01,0.000001).
+        sparse_coding_initModelNoTrain(2,0.1,0.0,100,0.01,0.000001).
 
 
 %%
-%% TESTING predicate initModelWithTrain/8
+%% TESTING predicate sparse_coding_initModelWithTrain/8
 %%
-:- begin_tests(initModelWithTrain).      
+:- begin_tests(sparse_coding_initModelWithTrain).      
 
 %% Failure Tests
                                             
 test(initModelWithTrain_Negative_RowNum, fail) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], -3,15,0.1,0.0,100,0.01,0.000001).
+        sparse_coding_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], -3,15,0.1,0.0,100,0.01,0.000001).
 
 test(initModelWithTrain_Negative_Atoms, fail) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3,-15,0.1,0.0,100,0.01,0.000001).
+        sparse_coding_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3,-15,0.1,0.0,100,0.01,0.000001).
 
 test(initModelWithTrain_Negative_MaxIterations, fail) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3,15,0.1,0.0,-100,0.01,0.000001).
+        sparse_coding_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3,15,0.1,0.0,-100,0.01,0.000001).
 
 test(initModelWithTrain_Negative_ObjTolerance, fail) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3,15,0.1,0.0,100,-0.01,0.000001).
+        sparse_coding_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3,15,0.1,0.0,100,-0.01,0.000001).
 
 test(initModelWithTrain_Negative_NewtonTolerance, fail) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3,15,0.1,0.0,100,0.01,-0.000001).
+        sparse_coding_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3,15,0.1,0.0,100,0.01,-0.000001).
 
 test(initModelWithTrain_Empty_List, fail) :-
-        initModelWithTrain([], 3,15,0.1,0.0,100,0.01,-0.000001).
+        sparse_coding_initModelWithTrain([], 3,15,0.1,0.0,100,0.01,-0.000001).
 
 test(initModelWithTrain_Data_Amount_Smaller_Than_Atoms, [error(_,system_error('There have to be more DataPoints than Atoms!'))]) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4,10,0.1,0.0,2,0.01,0.000001).
+        sparse_coding_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4,10,0.1,0.0,2,0.01,0.000001).
         
 
 %% Successful Tests
 
 test(initModelWithTrain_Direct_Input_Use) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, 3,0.1,0.0,2,0.01,0.000001).
+        sparse_coding_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, 3,0.1,0.0,2,0.01,0.000001).
 
 test(initModelWithTrain_Changed_Input) :-
-        initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 6,2,2.3,0.1,30,0.002,0.0001).
+        sparse_coding_initModelWithTrain([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 6,2,2.3,0.1,30,0.002,0.0001).
 
 test(initModelWithTrain_CSV_Use) :-
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,20, Data),
-        initModelWithTrain(Data,4,15,0.1,0.0,100,0.01,0.000001).
+        sparse_coding_initModelWithTrain(Data,4,15,0.1,0.0,100,0.01,0.000001).
 
-:- end_tests(initModelWithTrain).
+:- end_tests(sparse_coding_initModelWithTrain).
 
 
 %%
-%% TESTING predicate initModelNoTrain/6
+%% TESTING predicate sparse_coding_initModelNoTrain/6
 %%
-:- begin_tests(initModelNoTrain).      
+:- begin_tests(sparse_coding_initModelNoTrain).      
 
 %% Failure Tests
 
 test(initModelNoTrain_Negative_Atoms, fail) :-
-        initModelNoTrain(-15,0.1,0.0,100,0.01,0.000001).
+        sparse_coding_initModelNoTrain(-15,0.1,0.0,100,0.01,0.000001).
 
 test(initModelNoTrain_Negative_MaxIterations, fail) :-
-        initModelNoTrain(15,0.1,0.0,-100,0.01,0.000001).
+        sparse_coding_initModelNoTrain(15,0.1,0.0,-100,0.01,0.000001).
 
 test(initModelNoTrain_Negative_ObjTolerance, fail) :-
-        initModelNoTrain(15,0.1,0.0,100,-0.01,0.000001).
+        sparse_coding_initModelNoTrain(15,0.1,0.0,100,-0.01,0.000001).
 
 test(initModelNoTrain_Negative_NewtonTolerance, fail) :-
-        initModelNoTrain(15,0.1,0.0,100,0.01,-0.000001).
+        sparse_coding_initModelNoTrain(15,0.1,0.0,100,0.01,-0.000001).
 
 test(initModelNoTrain_Empty_List, fail) :-
-        initModelNoTrain(15,0.1,0.0,100,0.01,-0.000001).
+        sparse_coding_initModelNoTrain(15,0.1,0.0,100,0.01,-0.000001).
         
 
 %% Successful Tests
 
 test(initModelNoTrain_Normal_Input_Use) :-
-        initModelNoTrain(15,0.1,0.0,100,0.01,0.000001).
+        sparse_coding_initModelNoTrain(15,0.1,0.0,100,0.01,0.000001).
 
 test(initModelNoTrain_Changed_Input) :-
-        initModelNoTrain(5,2.3,0.2,200,0.002,0.0001).
+        sparse_coding_initModelNoTrain(5,2.3,0.2,200,0.002,0.0001).
 
-:- end_tests(initModelNoTrain).
+:- end_tests(sparse_coding_initModelNoTrain).
 
 
 %%
-%% TESTING predicate encode/4
+%% TESTING predicate sparse_coding_encode/4
 %%
-:- begin_tests(encode).      
+:- begin_tests(sparse_coding_encode).      
 
 %% Failure Tests
 
 test(encode_With_No_Trained_Model, [error(_,system_error('Matrix has not the correct dimensions!'))]) :-
         reset_Model_No_Train_A,
-        encode([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, CodesList, CodesRows),
+        sparse_coding_encode([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, CodesList, CodesRows),
         print(CodesList),
         print('\n'),
         print(CodesRows).
 
 test(encode_With_Not_Fitting_Data, [error(_,system_error('Matrix has not the correct dimensions!'))]) :-
         reset_Model_No_Train_A,
-        encode([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 2, CodesList, CodesRows),
+        sparse_coding_encode([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 2, CodesList, CodesRows),
         print(CodesList),
         print('\n'),
         print(CodesRows).
@@ -120,7 +120,7 @@ test(encode_With_Not_Fitting_Data, [error(_,system_error('Matrix has not the cor
 
 test(encode_Normal_Use1) :-
         reset_Model_With_Train_A,
-        encode([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, CodesList, CodesRows),
+        sparse_coding_encode([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, CodesList, CodesRows),
         print(CodesList),
         print('\n'),
         print(CodesRows).
@@ -130,50 +130,50 @@ test(encode_Normal_Use2) :-
         reset_Model_No_Train_A,
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        train(Data, 4, _),
-        encode([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, CodesList, CodesRows),
+        sparse_coding_train(Data, 4, _),
+        sparse_coding_encode([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, CodesList, CodesRows),
         print(CodesList),
         print('\n'),
         print(CodesRows).
 
-:- end_tests(encode).
+:- end_tests(sparse_coding_encode).
 
 
 %%
-%% TESTING predicate train/3
+%% TESTING predicate sparse_coding_train/3
 %%
-:- begin_tests(train).      
+:- begin_tests(sparse_coding_train).      
 
 %% Failure Tests
                                             
 test(train_With_Negative_RowNum, fail) :-
         reset_Model_No_Train_A,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], -3, _).
+        sparse_coding_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], -3, _).
 
 test(train_With_Empty_Data_List1, fail) :-
         reset_Model_No_Train_A,
-        train([], 1, _).
+        sparse_coding_train([], 1, _).
 
 test(train_With_Empty_Data_List2, fail) :-
         reset_Model_No_Train_A,
-        train([], 0, _).
+        sparse_coding_train([], 0, _).
         
 
 %% Successful Tests
 
 test(train_With_Direkt_Input) :-
         reset_Model_No_Train_A,
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, ObjectiveValue),
+        sparse_coding_train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, ObjectiveValue),
         print(ObjectiveValue).
 
 test(train_With_CSV_Input) :-
         reset_Model_No_Train_A,
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,20, Data),
-        train(Data, 4, ObjectiveValue),
+        sparse_coding_train(Data, 4, ObjectiveValue),
         print(ObjectiveValue).
 
-:- end_tests(train).
+:- end_tests(sparse_coding_train).
 
 
 run_sparse_coding_tests :-