diff --git a/src/methods/random_forest/random_forest.pl b/src/methods/random_forest/random_forest.pl
index a077c465b0e62e9e762453adb8e748524a561d2a..b88d027883bfee2fd6ae87a3c4ac3a5436698cd3 100644
--- a/src/methods/random_forest/random_forest.pl
+++ b/src/methods/random_forest/random_forest.pl
@@ -53,6 +53,11 @@ foreign(initModelNoTrain, c, initModelNoTrainI(+integer)).
 %%              Initializes the model and trains it but does not apply weights to it.
 %%
 initModelWithTrainNoWeights(DataList, DataRows, LabelsList, NumClasses, NumTrees, MinimumLeafSize, MinimumGainSplit, MaximumDepth) :-
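+        %% Basic sanity checks: out-of-range hyperparameters make the predicate
+        %% fail in Prolog instead of being handed to the C++ layer.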
+        NumClasses >= 0,
+        NumTrees > 0,
+        MinimumLeafSize > 0,
+        MinimumGainSplit >= 0.0,
+        MaximumDepth >= 0,
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)),
         convert_list_to_float_array(LabelsList, array(Ysize, Y)),
         initModelWithTrainNoWeightsI(X, Xsize, Xrownum, Y, Ysize, NumClasses, NumTrees, MinimumLeafSize, MinimumGainSplit, MaximumDepth).
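+%%
+%% Example call (illustrative only, mirroring random_forest_test.pl: a flat list
+%% of 12 values read as 4 points with 3 features each, 2 classes, 20 trees,
+%% minimum leaf size 1, minimum gain split 1.0e-7, maximum depth 0):
+%%
+%%   ?- initModelWithTrainNoWeights([5.1,3.5,1.4, 4.9,3.0,1.4, 4.7,3.2,1.3, 4.6,3.1,1.5],
+%%                                  3, [0,1,0,1], 2, 20, 1, 0.0000001, 0).
+%%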
@@ -79,6 +84,11 @@ foreign(initModelWithTrainNoWeights, c, initModelWithTrainNoWeightsI(
 %%              Initializes the model, trains it and applies weights to it.
 %%
 initModelWithTrainWithWeights(DataList, DataRows, LabelsList, NumClasses, WeightsList, NumTrees, MinimumLeafSize, MinimumGainSplit, MaximumDepth) :-
+        NumClasses >= 0,
+        NumTrees > 0,
+        MinimumLeafSize > 0,
+        MinimumGainSplit >= 0.0,
+        MaximumDepth >= 0,
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)),
         convert_list_to_float_array(LabelsList, array(Ysize, Y)),
         convert_list_to_float_array(WeightsList, array(Zsize, Z)),
@@ -165,6 +175,11 @@ foreign(numTrees, c, numTreesI([-integer])).
 %%              The minimumLeafSize and minimumGainSplit parameters are given to each individual decision tree during tree building.
 %%
 trainNoWeights(DataList, DataRows, LabelsList, NumClasses, NumTrees, MinimumLeafSize, MinimumGainSplit, MaximumDepth, Entropy) :-
+        NumClasses >= 0,
+        NumTrees > 0,
+        MinimumLeafSize > 0,
+        MinimumGainSplit >= 0.0,
+        MaximumDepth >= 0,
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)),
         convert_list_to_float_array(LabelsList, array(Ysize, Y)),
         trainNoWeightsI(X, Xsize, Xrownum, Y, Ysize, NumClasses, NumTrees, MinimumLeafSize, MinimumGainSplit, MaximumDepth, Entropy).
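+%%
+%% Example call (illustrative only, mirroring random_forest_test.pl; a model must
+%% already exist, e.g. via initModelNoTrain, and Entropy is unified with the
+%% value returned by the underlying training routine):
+%%
+%%   ?- initModelNoTrain,
+%%      trainNoWeights([5.1,3.5,1.4, 4.9,3.0,1.4, 4.7,3.2,1.3, 4.6,3.1,1.5],
+%%                     3, [0,1,0,1], 2, 20, 1, 0.0000001, 0, Entropy).
+%%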
@@ -193,6 +208,11 @@ foreign(trainNoWeights, c, trainNoWeightsI(     +pointer(float_array), +integer,
 %%              The minimumLeafSize and minimumGainSplit parameters are given to each individual decision tree during tree building.
 %%
 trainWithWeights(DataList, DataRows, LabelsList, NumClasses, WeightsList, NumTrees, MinimumLeafSize, MinimumGainSplit, MaximumDepth, Entropy) :-
+        NumClasses >= 0,
+        NumTrees > 0,
+        MinimumLeafSize > 0,
+        MinimumGainSplit >= 0.0,
+        MaximumDepth >= 0,
         convert_list_to_float_array(DataList, DataRows, array(Xsize, Xrownum, X)),
         convert_list_to_float_array(LabelsList, array(Ysize, Y)),
         convert_list_to_float_array(WeightsList, array(Zsize, Z)),
diff --git a/src/methods/random_forest/random_forest_test.pl b/src/methods/random_forest/random_forest_test.pl
index 6aa93769fe818e89668a29eebfc5b4e4e11dcddd..2ee63291a549c013240572c86432f79b21e3970f 100644
--- a/src/methods/random_forest/random_forest_test.pl
+++ b/src/methods/random_forest/random_forest_test.pl
@@ -6,38 +6,370 @@
 :- use_module(random_forest).
 :- use_module('../../helper_files/helper.pl').
 
-reset_Model :-
-        initModel(1,0,50,0.0001).
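+%% Test fixtures: reset_Model_NoTrain only initializes the forest, while
+%% reset_Model_WithTrain also trains it on a small toy data set (4 points with
+%% 3 features each, labels 0/1) so that the classify and numTrees tests below
+%% can run against a trained model.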
+reset_Model_NoTrain :-
+        initModelNoTrain.
+
+reset_Model_WithTrain :-
+        initModelWithTrainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 20, 1, 0.0000001, 0).
+
+
+%%
+%% TESTING predicate initModelNoTrain/0
+%%
+:- begin_tests(initModelNoTrain).
+
+%% Failure Tests
+%% Successful Tests
+
+test(random_forest_InitModelNoTrain) :-
+        initModelNoTrain.
+
+:- end_tests(initModelNoTrain).
+
+
+
+%%
+%% TESTING predicate initModelWithTrainNoWeights/8
+%%
+:- begin_tests(initModelWithTrainNoWeights).      
+
+%% Failure Tests
+                                            
+test(random_forest_InitModelWithTrainNoWeights_Negative_NumClasses, fail) :-
+        initModelWithTrainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], -2, 20, 1, 0.0000001, 0).
+
+test(random_forest_InitModelWithTrainNoWeights_Too_Many_NumClasses, fail) :-
+        initModelWithTrainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 15, 20, 1, 0.0000001, 0).
+
+test(random_forest_InitModelWithTrainNoWeights_Negative_NumTrees, fail) :-
+        initModelWithTrainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, -20, 1, 0.0000001, 0).
+
+test(random_forest_InitModelWithTrainNoWeights_Negative_MinLeafSize, fail) :-
+        initModelWithTrainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 20, -1, 0.0000001, 0).
+
+test(random_forest_InitModelWithTrainNoWeights_Negative_MinGainSplit, fail) :-
+        initModelWithTrainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 20, 1, -0.0000001, 0).
+
+test(random_forest_InitModelWithTrainNoWeights_Negative_MaxDepth, fail) :-
+        initModelWithTrainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 20, 1, 0.0000001, -1).
+
+
+test(random_forest_InitModelWithTrainNoWeights_Too_Short_Label, [error(_,system_error('Error'))]) :-
+        initModelWithTrainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], 2, 20, 1, 0.0000001, 0).
+
+test(random_forest_InitModelWithTrainNoWeights_Too_Long_Label, [error(_,system_error('Error'))]) :-
+        initModelWithTrainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1,0,1], 2, 20, 1, 0.0000001, 0).
+
+test(random_forest_InitModelWithTrainNoWeights_Too_Many_Label_Classes, [error(_,system_error('Error'))]) :-
+        initModelWithTrainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,2,3], 2, 20, 1, 0.0000001, 0).
+        
+
+%% Successful Tests
+
+test(random_forest_InitModelWithTrainNoWeights_Normal_Use) :-
+        initModelWithTrainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 20, 1, 0.0000001, 0).
+
+test(random_forest_InitModelWithTrainNoWeights_CSV_Input) :-
+        open('src/data_csv/iris2.csv', read, File),
+        take_csv_row(File, skipFirstRow,10, Data),
+        initModelWithTrainNoWeights(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 20, 1, 0.0000001, 0).
+
+:- end_tests(initModelWithTrainNoWeights).
+
+
 
 %%
-%% TESTING predicate predicate/10
+%% TESTING predicate initModelWithTrainWithWeights/9
 %%
-:- begin_tests(predicate).      
+:- begin_tests(initModelWithTrainWithWeights).      
 
 %% Failure Tests
                                             
-test(testDescription, [error(domain_error('expectation' , culprit), _)]) :-
-        reset_Model_No_Train(perceptron),
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, culprit, 50, 0.0001, _).
+test(random_forest_InitModelWithTrainWithWeights_Negative_NumClasses, fail) :-
+        initModelWithTrainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], -2, [0.5,0.1,0.7,1.2], 20, 1, 0.0000001, 0).
+
+test(random_forest_InitModelWithTrainWithWeights_Too_Many_NumClasses, fail) :-
+        initModelWithTrainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 15, [0.5,0.1,0.7,1.2], 20, 1, 0.0000001, 0).
+
+test(random_forest_InitModelWithTrainWithWeights_Negative_NumTrees, fail) :-
+        initModelWithTrainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.5,0.1,0.7,1.2], -20, 1, 0.0000001, 0).
+
+test(random_forest_InitModelWithTrainWithWeights_Negative_MinLeafSize, fail) :-
+        initModelWithTrainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.5,0.1,0.7,1.2], 20, -1, 0.0000001, 0).
+
+test(random_forest_InitModelWithTrainWithWeights_Negative_MinGainSplit, fail) :-
+        initModelWithTrainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.5,0.1,0.7,1.2], 20, 1, -0.0000001, 0).
+
+test(random_forest_InitModelWithTrainWithWeights_Negative_MaxDepth, fail) :-
+        initModelWithTrainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.5,0.1,0.7,1.2], 20, 1, 0.0000001, -1).
+
+
+test(random_forest_InitModelWithTrainWithWeights_Too_Short_Label, [error(_,system_error('Error'))]) :-
+        initModelWithTrainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], 2, [0.5,0.1,0.7,1.2], 20, 1, 0.0000001, 0).
+
+test(random_forest_InitModelWithTrainWithWeights_Too_Long_Label, [error(_,system_error('Error'))]) :-
+        initModelWithTrainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1,0,1], 2, [0.5,0.1,0.7,1.2], 20, 1, 0.0000001, 0).
+
+test(random_forest_InitModelWithTrainWithWeights_Too_Many_Label_Classes, [error(_,system_error('Error'))]) :-
+        initModelWithTrainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,2,3], 2, [0.5,0.1,0.7,1.2], 20, 1, 0.0000001, 0).
+
+
+test(random_forest_InitModelWithTrainWithWeights_Too_Short_Weights, [error(_,system_error('Error'))]) :-
+        initModelWithTrainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.5,0.1], 20, 1, 0.0000001, 0).
+
+test(random_forest_InitModelWithTrainWithWeights_Too_Long_Weights, [error(_,system_error('Error'))]) :-
+        initModelWithTrainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.5,0.1,0.7,1.2,0.43,2.0], 20, 1, 0.0000001, 0).
+
+test(random_forest_InitModelWithTrainWithWeights_Negative_Weights, [error(_,system_error('Error'))]) :-
+        initModelWithTrainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [-0.5,0.1,-0.7,1.2], 20, 1, 0.0000001, 0).
+        
+
+%% Successful Tests
+
+test(random_forest_InitModelWithTrainWithWeights_Normal_Use) :-
+        initModelWithTrainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.5,0.1,0.7,1.2], 20, 1, 0.0000001, 0).
+
+test(random_forest_InitModelWithTrainWithWeights_CSV_Input) :-
+        open('src/data_csv/iris2.csv', read, File),
+        take_csv_row(File, skipFirstRow,10, Data),
+        initModelWithTrainWithWeights(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, [0.5,0.1,0.7,1.2,0.5,0.1,0.7,1.2,0.5,0.1], 20, 1, 0.0000001, 0).
+
+:- end_tests(initModelWithTrainWithWeights).
+
+
+
+%%
+%% TESTING predicate classifyPoint/3
+%%
+:- begin_tests(classifyPoint).      
+
+%% Failure Tests
+                                            
+test(random_forest_ClassifyPoint_Before_Train, [error(_,system_error('Error'))]) :-
+        reset_Model_NoTrain,
+        classifyPoint([5.1,3.5,1.4], _, _).
+
+test(random_forest_ClassifyPoint_Smaller_Dim_To_Train, [error(_,system_error('Error'))]) :-
+        reset_Model_WithTrain,
+        classifyPoint([5.1,3.5], _, _).
+
+test(random_forest_ClassifyPoint_Larger_Dim_To_Train, [error(_,system_error('Error'))]) :-
+        reset_Model_WithTrain,
+        classifyPoint([5.1,3.5,1.4,4.3,0.4], _, _).
+        
+
+%% Successful Tests
+
+test(random_forest_ClassifyPoint_Normal_Use) :-
+        reset_Model_WithTrain,
+        classifyPoint([5.1,3.5,1.4], Prediction, ProbabilitiesList),
+        print('\nPrediction: '),
+        print(Prediction),
+        print('\nProbabilities: '),
+        print(ProbabilitiesList).
+
+:- end_tests(classifyPoint).
+
+
+
+%%
+%% TESTING predicate classifyMatrix/5
+%%
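+%% Note: the second argument of classifyMatrix is the number of rows of the data
+%% matrix, i.e. the number of features per point; the fixture model is trained
+%% with 3 features, so 2 or 4 rows below exercise a dimensionality mismatch.
+%%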
+:- begin_tests(classifyMatrix).      
+
+%% Failure Tests
+                                            
+test(random_forest_ClassifyMatrix_Before_Train, [error(_,system_error('Error'))]) :-
+        reset_Model_NoTrain,
+        classifyMatrix([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, _, _, _).
+
+test(random_forest_ClassifyMatrix_Smaller_Dim_To_Train, [error(_,system_error('Error'))]) :-
+        reset_Model_WithTrain,
+        classifyMatrix([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 2, _, _, _).
+
+test(random_forest_ClassifyMatrix_Larger_Dim_To_Train, [error(_,system_error('Error'))]) :-
+        reset_Model_WithTrain,
+        classifyMatrix([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 4, _, _, _).
+        
+
+%% Successful Tests
+
+test(random_forest_ClassifyMatrix_Normal_Use) :-
+        reset_Model_WithTrain,
+        classifyMatrix([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, Prediction, ProbabilitiesList, _),
+        print('\nPrediction: '),
+        print(Prediction),
+        print('\nProbabilities: '),
+        print(ProbabilitiesList).
+
+:- end_tests(classifyMatrix).
+
+
+
+%%
+%% TESTING predicate numTrees/1
+%%
+:- begin_tests(numTrees).      
+
+%% Failure Tests
+                                            
+test(random_forest_NumTrees_Before_Train, [error(_,system_error('Error'))]) :-
+        reset_Model_NoTrain,
+        numTrees(_).
+        
+
+%% Successful Tests
+
+test(random_forest_NumTrees_Normal_Use) :-
+        reset_Model_WithTrain,
+        numTrees(NumTrees),
+        print('\nNumber of Trees: '),
+        print(NumTrees).
+
+:- end_tests(numTrees).
+
+
+
+%%
+%% TESTING predicate trainNoWeights/9
+%%
+:- begin_tests(trainNoWeights).      
+                         
+%% Failure Tests
+                                            
+test(random_forest_TrainNoWeights_Negative_NumClasses, fail) :-
+        reset_Model_NoTrain,
+        trainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], -2, 20, 1, 0.0000001, 0, _).
+
+test(random_forest_TrainNoWeights_Too_Many_NumClasses, fail) :-
+        reset_Model_NoTrain,
+        trainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 15, 20, 1, 0.0000001, 0, _).
+
+test(random_forest_TrainNoWeights_Negative_NumTrees, fail) :-
+        reset_Model_NoTrain,
+        trainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, -20, 1, 0.0000001, 0, _).
+
+test(random_forest_TrainNoWeights_Negative_MinLeafSize, fail) :-
+        reset_Model_NoTrain,
+        trainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 20, -1, 0.0000001, 0, _).
+
+test(random_forest_TrainNoWeights_Negative_MinGainSplit, fail) :-
+        reset_Model_NoTrain,
+        trainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 20, 1, -0.0000001, 0, _).
+
+test(random_forest_TrainNoWeights_Negative_MaxDepth, fail) :-
+        reset_Model_NoTrain,
+        trainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 20, 1, 0.0000001, -1, _).
+
+
+test(random_forest_TrainNoWeights_Too_Short_Label, [error(_,system_error('Error'))]) :-
+        reset_Model_NoTrain,
+        trainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], 2, 20, 1, 0.0000001, 0, _).
+
+test(random_forest_TrainNoWeights_Too_Long_Label, [error(_,system_error('Error'))]) :-
+        reset_Model_NoTrain,
+        trainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1,0,1], 2, 20, 1, 0.0000001, 0, _).
+
+test(random_forest_TrainNoWeights_Too_Many_Label_Classes, [error(_,system_error('Error'))]) :-
+        reset_Model_NoTrain,
+        trainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,2,3], 2, 20, 1, 0.0000001, 0, _).
+        
+
+%% Successful Tests
+
+test(random_forest_TrainNoWeights_Normal_Use) :-
+        reset_Model_NoTrain,
+        trainNoWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, 20, 1, 0.0000001, 0, Entropy),
+        print('\nEntropy: '),
+        print(Entropy).
+
+test(random_forest_TrainNoWeights_CSV_Input) :-
+        reset_Model_NoTrain,
+        open('src/data_csv/iris2.csv', read, File),
+        take_csv_row(File, skipFirstRow,10, Data),
+        trainNoWeights(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, 20, 1, 0.0000001, 0, Entropy),
+        print('\nEntropy: '),
+        print(Entropy).
+
+:- end_tests(trainNoWeights).
+
+
+
+%%
+%% TESTING predicate trainWithWeights/10
+%%
+:- begin_tests(trainWithWeights).      
+
+%% Failure Tests
+                                            
+test(random_forest_TrainWithWeights_Negative_NumClasses, fail) :-
+        reset_Model_NoTrain,
+        trainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], -2, [0.5,0.1,0.7,1.2], 20, 1, 0.0000001, 0, _).
+
+test(random_forest_TrainWithWeights_Too_Many_NumClasses, fail) :-
+        reset_Model_NoTrain,
+        trainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 15, [0.5,0.1,0.7,1.2], 20, 1, 0.0000001, 0, _).
+
+test(random_forest_TrainWithWeights_Negative_NumTrees, fail) :-
+        reset_Model_NoTrain,
+        trainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.5,0.1,0.7,1.2], -20, 1, 0.0000001, 0, _).
+
+test(random_forest_TrainWithWeights_Negative_MinLeafSize, fail) :-
+        reset_Model_NoTrain,
+        trainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.5,0.1,0.7,1.2], 20, -1, 0.0000001, 0, _).
+
+test(random_forest_TrainWithWeights_Negative_MinGainSplit, fail) :-
+        reset_Model_NoTrain,
+        trainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.5,0.1,0.7,1.2], 20, 1, -0.0000001, 0, _).
+
+test(random_forest_TrainWithWeights_Negative_MaxDepth, fail) :-
+        reset_Model_NoTrain,
+        trainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.5,0.1,0.7,1.2], 20, 1, 0.0000001, -1, _).
+
+
+test(random_forest_TrainWithWeights_Too_Short_Label, [error(_,system_error('Error'))]) :-
+        reset_Model_NoTrain,
+        trainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1], 2, [0.5,0.1,0.7,1.2], 20, 1, 0.0000001, 0, _).
+
+test(random_forest_TrainWithWeights_Too_Long_Label, [error(_,system_error('Error'))]) :-
+        reset_Model_NoTrain,
+        trainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1,0,1], 2, [0.5,0.1,0.7,1.2], 20, 1, 0.0000001, 0, _).
+
+test(random_forest_TrainWithWeights_Too_Many_Label_Classes, [error(_,system_error('Error'))]) :-
+        reset_Model_NoTrain,
+        trainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,2,3], 2, [0.5,0.1,0.7,1.2], 20, 1, 0.0000001, 0, _).
+
+
+test(random_forest_TrainWithWeights_Too_Short_Weights, [error(_,system_error('Error'))]) :-
+        reset_Model_NoTrain,
+        trainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.5,0.1], 20, 1, 0.0000001, 0, _).
+
+test(random_forest_TrainWithWeights_Too_Long_Weights, [error(_,system_error('Error'))]) :-
+        reset_Model_NoTrain,
+        trainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.5,0.1,0.7,1.2,0.43,2.0], 20, 1, 0.0000001, 0, _).
 
-test(testDescription2, [error(_,system_error('The values of the Label have to start at 0 and be >= 0 and < the given numClass!'))]) :-
-        reset_Model_No_Train(perceptron),
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,2], 2, perceptron, 50, 0.0001, _).
+test(random_forest_TrainWithWeights_Negative_Weights, [error(_,system_error('Error'))]) :-
+        reset_Model_NoTrain,
+        trainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [-0.5,0.1,-0.7,1.2], 20, 1, 0.0000001, 0, _).
         
 
 %% Successful Tests
 
-test(testDescription3, [true(Error =:= 1)]) :-
-        reset_Model_No_Train(perceptron),
-        train([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,0,0,0], 2, perceptron, 50, 0.0001, Error).
+test(random_forest_TrainWithWeights_Normal_Use) :-
+        reset_Model_NoTrain,
+        trainWithWeights([5.1,3.5,1.4,4.9,3.0,1.4,4.7,3.2,1.3,4.6,3.1,1.5], 3, [0,1,0,1], 2, [0.5,0.1,0.7,1.2], 20, 1, 0.0000001, 0, Entropy),
+        print('\nEntropy: '),
+        print(Entropy).
 
-test(testDescription4, [true(Error =:= 0.9797958971132711)]) :-
-        reset_Model_No_Train(perceptron),
+test(random_forest_TrainWithWeights_CSV_Input) :-
+        reset_Model_NoTrain,
         open('src/data_csv/iris2.csv', read, File),
         take_csv_row(File, skipFirstRow,10, Data),
-        train(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, perceptron, 50, 0.0001, Error).
+        trainWithWeights(Data, 4, [0,1,0,1,1,0,1,1,1,0], 2, [0.5,0.1,0.7,1.2,0.5,0.1,0.7,1.2,0.5,0.1], 20, 1, 0.0000001, 0, Entropy),
+        print('\nEntropy: '),
+        print(Entropy).
 
-:- end_tests(predicate).
+:- end_tests(trainWithWeights).
 
 run_random_forest_tests :-
         run_tests.