Skip to content
Snippets Groups Projects
Commit e77b439a authored by Marc Feger's avatar Marc Feger
Browse files

Refactor methods.tex

parent 3afad534
No related branches found
No related tags found
No related merge requests found
Pipeline #44717 passed
bilder/pngfuel-friends.png

27 KiB

...@@ -46,3 +46,44 @@ ...@@ -46,3 +46,44 @@
location = {Las Cruces, New Mexico}, location = {Las Cruces, New Mexico},
series = {ACL ’94} series = {ACL ’94}
} }
@inproceedings{pennington2014glove,
  title     = {{GloVe}: Global Vectors for Word Representation},
  author    = {Pennington, Jeffrey and Socher, Richard and Manning, Christopher},
  booktitle = {Proceedings of the 2014 Conference on Empirical Methods in Natural Language Processing ({EMNLP})},
  month     = oct,
  year      = {2014},
  address   = {Doha, Qatar},
  publisher = {Association for Computational Linguistics},
  doi       = {10.3115/v1/D14-1162},
  pages     = {1532--1543}
}
@article{Peters:2018ELMo,
  author        = {Peters, Matthew E. and Neumann, Mark and Iyyer, Mohit and Gardner, Matt and Clark, Christopher and Lee, Kenton and Zettlemoyer, Luke},
  title         = {Deep Contextualized Word Representations},
  journal       = {CoRR},
  volume        = {abs/1802.05365},
  year          = {2018},
  url           = {http://arxiv.org/abs/1802.05365},
  archivePrefix = {arXiv},
  eprint        = {1802.05365}
}
@article{devlin2018bert,
  author        = {Devlin, Jacob and Chang, Ming{-}Wei and Lee, Kenton and Toutanova, Kristina},
  title         = {{BERT}: Pre-training of Deep Bidirectional Transformers for Language Understanding},
  journal       = {CoRR},
  volume        = {abs/1810.04805},
  year          = {2018},
  url           = {http://arxiv.org/abs/1810.04805},
  archivePrefix = {arXiv},
  eprint        = {1810.04805}
}
@inproceedings{Armand:2017FastText,
  title     = {Bag of Tricks for Efficient Text Classification},
  author    = {Joulin, Armand and Grave, Edouard and Bojanowski, Piotr and Mikolov, Tomas},
  booktitle = {Proceedings of the 15th Conference of the {European} Chapter of the Association for Computational Linguistics},
  month     = apr,
  year      = {2017},
  address   = {Valencia, Spain},
  publisher = {Association for Computational Linguistics},
  doi       = {10.18653/v1/E17-2068},
  pages     = {427--431}
}
\ No newline at end of file
...@@ -51,23 +51,27 @@ ...@@ -51,23 +51,27 @@
\begin{itemize} \begin{itemize}
\item For ranking the arguments, we measured the semantic similarity \item For ranking the arguments, we measured the semantic similarity
between the premises and conclusions between the premises and conclusions
\item Each argument was embedded word-wise in an averaged vector space \item Arguments were embedded word-wise in an averaged vector space
\item The resulting similarity was calculated by using $cos(c, p)$ \item The resulting similarity was calculated by using $Cos(c, p)$
\item In the course of this experiment, we used three different embeddings
\begin{itemize}
\item BERT\footnote{J. Devlin, M. Chang, K. Lee, and K. Toutanova, “BERT: pre-training of deep bidirectional transformers
for language understanding,”}
\item ELMo\footnote{M. E. Peters, M. Neumann, M. Iyyer, M. Gardner, C. Clark, K. Lee, and L. Zettlemoyer, “Deep
contextualized word representations,”}
\item GloVe\footnote{J. Pennington, R. Socher, and C. Manning, “Glove: Global vectors for word representation,”}
\end{itemize}
\end{itemize} \end{itemize}
\begin{block}{Embeddings used}
BERT by \cite{devlin2018bert}
ELMo by \cite{Peters:2018ELMo}
GloVe by \cite{pennington2014glove}
\end{block}
\end{frame} \end{frame}
\begin{frame} \begin{frame}
\frametitle{Sentiment} \frametitle{Sentiment}
\begin{columns}
\column{0.6\textwidth}
\begin{itemize} \begin{itemize}
\item As another approach we used to measure the positivity of the argument \item As another approach we used to measure the positivity of the argument
\item Therefore, we used a sentiment neural network based on FastText\footnote{A. Joulin, E. Grave, P. Bojanowski, and T. Mikolov, “Bag of tricks for efficient text classification,”}, which was \item We used a neural network based on FastText by \cite{Armand:2017FastText}
trained on film ratings of IMDb \item The neural network was trained to indicate the sentiment of IMDb film ratings
\end{itemize} \end{itemize}
\column{0.4\textwidth}
\includegraphics[scale=0.1]{bilder/pngfuel-friends.png}
\end{columns}
\end{frame} \end{frame}
\ No newline at end of file
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment