Does the Geometry of Word Embeddings Help Document Classification? A Case Study on Persistent Homology Based Representations. Michel, P., Ravichander, A., & Rijhwani, S. Paper abstract bibtex We investigate the pertinence of methods from algebraic topology for text data analysis. These methods enable the development of mathematically-principled isometric-invariant mappings from a set of vectors to a document embedding, which is stable with respect to the geometry of the document in the selected metric space. In this work, we evaluate the utility of these topology-based document representations in traditional NLP tasks, specifically document clustering and sentiment classification. We find that the embeddings do not benefit text analysis. In fact, performance is worse than simple techniques like tf-idf, indicating that the geometry of the document does not provide enough variability for classification on the basis of topic or sentiment in the chosen datasets.

@article{michelDoesGeometryWord2017,
  archiveprefix = {arXiv},
  eprinttype    = {arxiv},
  eprint        = {1705.10900},
  primaryclass  = {cs},
  title         = {Does the Geometry of Word Embeddings Help Document Classification? {A} Case Study on Persistent Homology Based Representations},
  url           = {http://arxiv.org/abs/1705.10900},
  shorttitle    = {Does the Geometry of Word Embeddings Help Document Classification?},
  abstract      = {We investigate the pertinence of methods from algebraic topology for text data analysis. These methods enable the development of mathematically-principled isometric-invariant mappings from a set of vectors to a document embedding, which is stable with respect to the geometry of the document in the selected metric space. In this work, we evaluate the utility of these topology-based document representations in traditional NLP tasks, specifically document clustering and sentiment classification. We find that the embeddings do not benefit text analysis. In fact, performance is worse than simple techniques like \textit{tf-idf}, indicating that the geometry of the document does not provide enough variability for classification on the basis of topic or sentiment in the chosen datasets.},
  urldate       = {2019-02-19},
  date          = {2017-05-30},
  keywords      = {Computer Science - Computation and Language},
  author        = {Michel, Paul and Ravichander, Abhilasha and Rijhwani, Shruti},
  file          = {/home/dimitri/Nextcloud/Zotero/storage/KM5ERMB9/Michel et al. - 2017 - Does the Geometry of Word Embeddings Help Document.pdf;/home/dimitri/Nextcloud/Zotero/storage/E39REUD6/1705.html}
}

Downloads: 0

{"_id":"rJFAmx2zaH7mK8YcZ","bibbaseid":"michel-ravichander-rijhwani-doesthegeometryofwordembeddingshelpdocumentclassificationacasestudyonpersistenthomologybasedrepresentations","authorIDs":[],"author_short":["Michel, P.","Ravichander, A.","Rijhwani, S."],"bibdata":{"bibtype":"article","type":"article","archiveprefix":"arXiv","eprinttype":"arxiv","eprint":"1705.10900","primaryclass":"cs","title":"Does the Geometry of Word Embeddings Help Document Classification? A Case Study on Persistent Homology Based Representations","url":"http://arxiv.org/abs/1705.10900","shorttitle":"Does the Geometry of Word Embeddings Help Document Classification?","abstract":"We investigate the pertinence of methods from algebraic topology for text data analysis. These methods enable the development of mathematically-principled isometric-invariant mappings from a set of vectors to a document embedding, which is stable with respect to the geometry of the document in the selected metric space. In this work, we evaluate the utility of these topology-based document representations in traditional NLP tasks, specifically document clustering and sentiment classification. We find that the embeddings do not benefit text analysis. In fact, performance is worse than simple techniques like \\$\\textbackslash{}textit\\{tf-idf\\}\\$, indicating that the geometry of the document does not provide enough variability for classification on the basis of topic or sentiment in the chosen datasets.","urldate":"2019-02-19","date":"2017-05-30","keywords":"Computer Science - Computation and Language","author":[{"propositions":[],"lastnames":["Michel"],"firstnames":["Paul"],"suffixes":[]},{"propositions":[],"lastnames":["Ravichander"],"firstnames":["Abhilasha"],"suffixes":[]},{"propositions":[],"lastnames":["Rijhwani"],"firstnames":["Shruti"],"suffixes":[]}],"file":"/home/dimitri/Nextcloud/Zotero/storage/KM5ERMB9/Michel et al. 
- 2017 - Does the Geometry of Word Embeddings Help Document.pdf;/home/dimitri/Nextcloud/Zotero/storage/E39REUD6/1705.html","bibtex":"@article{michelDoesGeometryWord2017,\n archivePrefix = {arXiv},\n eprinttype = {arxiv},\n eprint = {1705.10900},\n primaryClass = {cs},\n title = {Does the {{Geometry}} of {{Word Embeddings Help Document Classification}}? {{A Case Study}} on {{Persistent Homology Based Representations}}},\n url = {http://arxiv.org/abs/1705.10900},\n shorttitle = {Does the {{Geometry}} of {{Word Embeddings Help Document Classification}}?},\n abstract = {We investigate the pertinence of methods from algebraic topology for text data analysis. These methods enable the development of mathematically-principled isometric-invariant mappings from a set of vectors to a document embedding, which is stable with respect to the geometry of the document in the selected metric space. In this work, we evaluate the utility of these topology-based document representations in traditional NLP tasks, specifically document clustering and sentiment classification. We find that the embeddings do not benefit text analysis. In fact, performance is worse than simple techniques like \\$\\textbackslash{}textit\\{tf-idf\\}\\$, indicating that the geometry of the document does not provide enough variability for classification on the basis of topic or sentiment in the chosen datasets.},\n urldate = {2019-02-19},\n date = {2017-05-30},\n keywords = {Computer Science - Computation and Language},\n author = {Michel, Paul and Ravichander, Abhilasha and Rijhwani, Shruti},\n file = {/home/dimitri/Nextcloud/Zotero/storage/KM5ERMB9/Michel et al. 
- 2017 - Does the Geometry of Word Embeddings Help Document.pdf;/home/dimitri/Nextcloud/Zotero/storage/E39REUD6/1705.html}\n}\n\n","author_short":["Michel, P.","Ravichander, A.","Rijhwani, S."],"key":"michelDoesGeometryWord2017","id":"michelDoesGeometryWord2017","bibbaseid":"michel-ravichander-rijhwani-doesthegeometryofwordembeddingshelpdocumentclassificationacasestudyonpersistenthomologybasedrepresentations","role":"author","urls":{"Paper":"http://arxiv.org/abs/1705.10900"},"keyword":["Computer Science - Computation and Language"],"downloads":0},"bibtype":"article","biburl":"https://raw.githubusercontent.com/dlozeve/newblog/master/bib/all.bib","creationDate":"2020-01-08T20:39:39.254Z","downloads":0,"keywords":["computer science - computation and language"],"search_terms":["geometry","word","embeddings","help","document","classification","case","study","persistent","homology","based","representations","michel","ravichander","rijhwani"],"title":"Does the Geometry of Word Embeddings Help Document Classification? A Case Study on Persistent Homology Based Representations","year":null,"dataSources":["3XqdvqRE7zuX4cm8m"]}