diff --git a/slides/methods.tex b/slides/methods.tex
index 3457ccdb91813eae424d542f4292b442e12b2833..1828921b98e478533515792fb3453ba399d091da 100644
--- a/slides/methods.tex
+++ b/slides/methods.tex
@@ -3,19 +3,17 @@
 \begin{frame}
   \frametitle{Similarity}
   \begin{itemize}
-    \item For the ranking of arguments, we measured the semantic similarity
-      between premise and conclusion
-    \item Here each word of the argument in embedded in a vector space and the
-      average of the vectors of the argument is calculated
-    \item The similarity of a premise and a conclusion is the calculated by the
-      angle between them
+    \item For ranking the arguments, we measured the semantic similarity
+      between the premises and conclusions
+    \item Each argument was embedded by averaging the word vectors of its tokens
+    \item The resulting similarity was calculated as $\cos(\vec{c}, \vec{p})$, the cosine of the angle between the averaged vectors
     \item In the course of this experiment, we used three different embeddings
     \begin{itemize}
       \item BERT\footnote{J. Devlin, M. Chang, K. Lee, and K. Toutanova, “BERT: pre-training of deep bidirectional transformers for language understanding,”}
-      \item Elmo\footnote{M. E. Peters, M. Neumann, M. Iyyer, M. Gardner, C. Clark, K. Lee, and L. Zettlemoyer, “Deep
+      \item ELMo\footnote{M. E. Peters, M. Neumann, M. Iyyer, M. Gardner, C. Clark, K. Lee, and L. Zettlemoyer, “Deep
 contextualized word representations,”}
-      \item Glove\footnote{J. Pennington, R. Socher, and C. Manning, “Glove: Global vectors for word representation,”}
+      \item GloVe\footnote{J. Pennington, R. Socher, and C. Manning, “GloVe: Global vectors for word representation,”}
     \end{itemize}
   \end{itemize}
 \end{frame}
@@ -24,9 +22,8 @@ contextualized word representations,”}
 \begin{frame}
   \frametitle{Sentiment}
   \begin{itemize}
-    \item Another approach to rank the argument is to measure how positive the tone
-      of the premises is
-    \item For this, we used a sentiment neural network based on FastText\footnote{A. Joulin, E. Grave, P. Bojanowski, and T. Mikolov, “Bag of tricks for efficient text classification,”}, which was
+    \item As another approach, we ranked the arguments by how positive the tone of their premises is
+    \item For this, we used a sentiment neural network based on FastText\footnote{A. Joulin, E. Grave, P. Bojanowski, and T. Mikolov, “Bag of tricks for efficient text classification,”}, which was
       trained on film ratings of IMDb
   \end{itemize}
 \end{frame}
\ No newline at end of file
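
For reference, a minimal sketch (not part of the patch) of the similarity computation described on the Similarity slide, assuming $E(w)$ denotes the word embedding of token $w$ under BERT, ELMo, or GloVe, and $P$, $C$ are the tokens of the premise and the conclusion; these symbols are illustrative and do not appear in the slides:

% Illustrative notation only: E(w), P, and C are assumptions, not defined in methods.tex.
% The averaged word vectors of premise and conclusion are compared via cosine similarity.
\[
  \vec{p} = \frac{1}{|P|} \sum_{w \in P} E(w),
  \qquad
  \vec{c} = \frac{1}{|C|} \sum_{w \in C} E(w),
  \qquad
  \cos(\vec{c}, \vec{p}) = \frac{\vec{c} \cdot \vec{p}}{\|\vec{c}\| \, \|\vec{p}\|}
\]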