diff --git a/data/Matrikelnummer.txt b/data/Matrikelnummer.txt
new file mode 100644
index 0000000000000000000000000000000000000000..5b870ff01f8aecce221ac7ed4b5c26787821c9ef
--- /dev/null
+++ b/data/Matrikelnummer.txt
@@ -0,0 +1,122 @@
+2618131
+ 5396159
+ 4195500
+ 2477916
+ 1451975
+ 1527582
+ 1902738
+ 1021949
+ 3396397
+ 5654451
+ 2344214
+ 1165119
+ 4673022
+ 1231797
+ 847962
+ 3185249
+ 3612026
+ 3561847
+ 9826007
+ 2182311
+ 4361295
+ 2880477
+ 8649586
+ 4733755
+ 2894201
+ 6395638
+ 7236398
+ 8746104
+ 7684872
+ 2938895
+ 5181005
+ 6171182
+ 8651985
+ 5875557
+ 7681752
+ 6336668
+ 4187880
+ 7260199
+ 1167683
+ 9091573
+ 2918857
+ 9216329
+ 4164654
+ 8594589
+ 3159466
+ 7604001
+ 3421741
+ 7254079
+ 1270444
+ 5645253
+ 980528
+ 708661
+ 8328098
+ 8221551
+ 3148516
+ 1800021
+ 5571110
+ 2855211
+ 7828272
+ 6925189
+ 7942598
+ 624266
+ 3187881
+ 3256903
+ 8196660
+ 6132367
+ 9453755
+ 6611707
+ 9613058
+ 9930906
+ 6200765
+ 6558831
+ 4527000
+ 2344683
+ 8898890
+ 9120266
+ 1878736
+ 4611990
+ 31286
+ 7712537
+ 5559006
+ 4883443
+ 6825506
+ 2099459
+ 7323188
+ 4974718
+ 1906938
+ 4541044
+ 9647493
+ 1634307
+ 9728419
+ 5011573
+ 2060543
+ 3382411
+ 6996707
+ 1444539
+ 3305362
+ 2574220
+ 4196813
+ 3169919
+ 515304
+ 8923309
+ 7560572
+ 660534
+ 948191
+ 434391
+ 5246351
+ 4696889
+ 5771826
+ 2645612
+ 1035896
+ 2163194
+ 4594761
+ 2139089
+ 6826880
+ 8984885
+ 8653828
+ 3789292
+ 6079828
+ 5337008
+ 4980605
+ 
\ No newline at end of file
diff --git a/src/model.py b/src/model.py
index 65d5c17a39494519c569adba49cce8aec6144d1d..e2bcde6b42dac2a58556a71aa3aba4aae901db76 100644
--- a/src/model.py
+++ b/src/model.py
@@ -195,7 +195,9 @@ class Model:
 
         # word beam search: already contains label strings
         if self.decoder_type == DecoderType.WordBeamSearch:
-            label_strs = ctc_output
+            label_strs = ctc_output[0]
+            for output in ctc_output:  # debug: print the decoded strings for each decoder output entry
+                print([''.join([self.char_list[c] for c in label_str]) for label_str in output])
 
         # TF decoders: label strings are contained in sparse tensor
         else:
diff --git a/src/webserver.py b/src/webserver.py
index f449d6a2438c87328c2258fc0a17b61a1adf4cbe..effdafb7b21dae2afce2ff1c47ac6ea922552e2b 100644
--- a/src/webserver.py
+++ b/src/webserver.py
@@ -43,9 +43,22 @@ def predictNach():
     model_name.decoder = WordBeamSearch(50, 'Words', 0.0, corpus.encode('utf8'), chars.encode('utf8'),
                                     word_chars.encode('utf8'))
     recognized, probability = htr_model.Model.infer_batch(model_name, batch)
+    # convert the corpus string to a list of words, split at whitespace
+    corpus = corpus.split()
+    result_list = []
+    for name in recognized:
+        # collect every corpus index whose word matches the recognized name
+        indices = []
+        for i, word in enumerate(corpus):
+            if name == word:
+                indices.append(i)
+        # -1 marks a name that was not found in the corpus
+        if not indices:
+            indices.append(-1)
+        result_list.append((name, indices))
 
     result = {
-        'recognized': recognized[0],
+        'recognized': result_list,
     }
     return jsonify(result)
 
@@ -59,6 +72,7 @@ def predictVor():
     image_array = image_array[:-2]
     image_array = image_array.reshape((h, w))
     preprocessor = htr_preprocessor.Preprocessor(htr.get_img_size(), dynamic_width=True, padding=16)
+    print(image_array.shape)  # debug: log the dimensions of the incoming image
     processed_image = preprocessor.process_img(image_array)
     batch = htr_data_loader.Batch([processed_image], None, 1)
 
@@ -69,9 +83,22 @@
                                     word_chars.encode('utf8'))
     
     recognized, probability = htr_model.Model.infer_batch(model_name, batch)
+    # convert the corpus string to a list of words, split at whitespace
+    corpus = corpus.split()
+    result_list = []
+    for name in recognized:
+        # collect every corpus index whose word matches the recognized name
+        indices = []
+        for i, word in enumerate(corpus):
+            if name == word:
+                indices.append(i)
+        # -1 marks a name that was not found in the corpus
+        if not indices:
+            indices.append(-1)
+        result_list.append((name, indices))
 
     result = {
-        'recognized': recognized[0],
+        'recognized': result_list,
     }
     return jsonify(result)
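
Note: the index-lookup block is duplicated in predictNach() and predictVor(). A minimal sketch of how it could be factored into one shared helper, assuming `recognized` is a list of strings and `corpus` is the same whitespace-separated corpus string passed to WordBeamSearch; the helper name match_names_to_corpus is illustrative and not part of the existing code:

def match_names_to_corpus(recognized, corpus):
    # Return (name, indices) pairs, where indices lists every position of the
    # name in the whitespace-split corpus, or [-1] if the name does not occur.
    words = corpus.split()
    result_list = []
    for name in recognized:
        indices = [i for i, word in enumerate(words) if word == name]
        result_list.append((name, indices or [-1]))
    return result_list

Both endpoints could then build their response as jsonify({'recognized': match_names_to_corpus(recognized, corpus)}).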