@inproceedings{lietard-etal-2024-word,
title = "To Word Senses and Beyond: Inducing Concepts with Contextualized Language Models",
author = "Li{\'e}tard, Bastien and
Denis, Pascal and
Keller, Mikaela",
editor = "Al-Onaizan, Yaser and
Bansal, Mohit and
Chen, Yun-Nung",
booktitle = "Proceedings of the 2024 Conference on Empirical Methods in Natural Language Processing",
month = nov,
year = "2024",
address = "Miami, Florida, USA",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2024.emnlp-main.156",
pages = "2684--2696",
abstract = "Polysemy and synonymy are two crucial interrelated facets of lexicalambiguity. While both phenomena are widely documented in lexical resources and have been studied extensively in NLP,leading to dedicated systems, they are often being consideredindependently in practictal problems. While many tasks dealing with polysemy (e.g. Word SenseDisambiguiation or Induction) highlight the role of word{'}s senses,the study of synonymy is rooted in the study of concepts, i.e. meaningsshared across the lexicon. In this paper, we introduce ConceptInduction, the unsupervised task of learning a soft clustering amongwords that defines a set of concepts directly from data. This taskgeneralizes Word Sense Induction. We propose a bi-levelapproach to Concept Induction that leverages both a locallemma-centric view and a global cross-lexicon view to induceconcepts. We evaluate the obtained clustering on SemCor{'}s annotateddata and obtain good performance (BCubed F1 above0.60). We find that the local and the global levels are mutuallybeneficial to induce concepts and also senses in our setting. Finally,we create static embeddings representing our induced concepts and usethem on the Word-in-Context task, obtaining competitive performancewith the State-of-the-Art.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
  <mods ID="lietard-etal-2024-word">
    <titleInfo>
      <title>To Word Senses and Beyond: Inducing Concepts with Contextualized Language Models</title>
    </titleInfo>
    <name type="personal">
      <namePart type="given">Bastien</namePart>
      <namePart type="family">Liétard</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Pascal</namePart>
      <namePart type="family">Denis</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Mikaela</namePart>
      <namePart type="family">Keller</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <originInfo>
      <dateIssued>2024-11</dateIssued>
    </originInfo>
    <typeOfResource>text</typeOfResource>
    <relatedItem type="host">
      <titleInfo>
        <title>Proceedings of the 2024 Conference on Empirical Methods in Natural Language Processing</title>
      </titleInfo>
      <name type="personal">
        <namePart type="given">Yaser</namePart>
        <namePart type="family">Al-Onaizan</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Mohit</namePart>
        <namePart type="family">Bansal</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Yun-Nung</namePart>
        <namePart type="family">Chen</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <originInfo>
        <publisher>Association for Computational Linguistics</publisher>
        <place>
          <placeTerm type="text">Miami, Florida, USA</placeTerm>
        </place>
      </originInfo>
      <genre authority="marcgt">conference publication</genre>
    </relatedItem>
    <abstract>Polysemy and synonymy are two crucial interrelated facets of lexical ambiguity. While both phenomena are widely documented in lexical resources and have been studied extensively in NLP, leading to dedicated systems, they are often considered independently in practical problems. While many tasks dealing with polysemy (e.g. Word Sense Disambiguation or Induction) highlight the role of a word’s senses, the study of synonymy is rooted in the study of concepts, i.e. meanings shared across the lexicon. In this paper, we introduce Concept Induction, the unsupervised task of learning a soft clustering among words that defines a set of concepts directly from data. This task generalizes Word Sense Induction. We propose a bi-level approach to Concept Induction that leverages both a local lemma-centric view and a global cross-lexicon view to induce concepts. We evaluate the obtained clustering on SemCor’s annotated data and obtain good performance (BCubed F1 above 0.60). We find that the local and the global levels are mutually beneficial to induce concepts and also senses in our setting. Finally, we create static embeddings representing our induced concepts and use them on the Word-in-Context task, obtaining performance competitive with the state of the art.</abstract>
    <identifier type="citekey">lietard-etal-2024-word</identifier>
    <location>
      <url>https://aclanthology.org/2024.emnlp-main.156</url>
    </location>
    <part>
      <date>2024-11</date>
      <extent unit="page">
        <start>2684</start>
        <end>2696</end>
      </extent>
    </part>
  </mods>
</modsCollection>
%0 Conference Proceedings
%T To Word Senses and Beyond: Inducing Concepts with Contextualized Language Models
%A Liétard, Bastien
%A Denis, Pascal
%A Keller, Mikaela
%Y Al-Onaizan, Yaser
%Y Bansal, Mohit
%Y Chen, Yun-Nung
%S Proceedings of the 2024 Conference on Empirical Methods in Natural Language Processing
%D 2024
%8 November
%I Association for Computational Linguistics
%C Miami, Florida, USA
%F lietard-etal-2024-word
%X Polysemy and synonymy are two crucial interrelated facets of lexical ambiguity. While both phenomena are widely documented in lexical resources and have been studied extensively in NLP, leading to dedicated systems, they are often considered independently in practical problems. While many tasks dealing with polysemy (e.g. Word Sense Disambiguation or Induction) highlight the role of a word’s senses, the study of synonymy is rooted in the study of concepts, i.e. meanings shared across the lexicon. In this paper, we introduce Concept Induction, the unsupervised task of learning a soft clustering among words that defines a set of concepts directly from data. This task generalizes Word Sense Induction. We propose a bi-level approach to Concept Induction that leverages both a local lemma-centric view and a global cross-lexicon view to induce concepts. We evaluate the obtained clustering on SemCor’s annotated data and obtain good performance (BCubed F1 above 0.60). We find that the local and the global levels are mutually beneficial to induce concepts and also senses in our setting. Finally, we create static embeddings representing our induced concepts and use them on the Word-in-Context task, obtaining performance competitive with the state of the art.
%U https://aclanthology.org/2024.emnlp-main.156
%P 2684-2696
Markdown (Informal)
[To Word Senses and Beyond: Inducing Concepts with Contextualized Language Models](https://aclanthology.org/2024.emnlp-main.156) (Liétard et al., EMNLP 2024)
ACL
Bastien Liétard, Pascal Denis, and Mikaela Keller. 2024. To Word Senses and Beyond: Inducing Concepts with Contextualized Language Models. In Proceedings of the 2024 Conference on Empirical Methods in Natural Language Processing, pages 2684–2696, Miami, Florida, USA. Association for Computational Linguistics.
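
Note on the evaluation metric named in the abstract: BCubed F1 (Bagga and Baldwin, 1998) scores a clustering by averaging, over items, the precision and recall between each item's induced cluster and its gold class. Below is a minimal Python sketch of the standard hard-clustering variant of the metric; the paper itself evaluates soft clusterings, and the function name and toy data here are illustrative assumptions, not taken from the paper.

from collections import defaultdict

def bcubed_f1(pred, gold):
    """pred, gold: dicts mapping each item to a cluster id / gold class id."""
    # Group items by predicted cluster and by gold class.
    by_pred, by_gold = defaultdict(set), defaultdict(set)
    for item, c in pred.items():
        by_pred[c].add(item)
    for item, g in gold.items():
        by_gold[g].add(item)

    precisions, recalls = [], []
    for item in pred:
        cluster = by_pred[pred[item]]  # items sharing this item's cluster
        klass = by_gold[gold[item]]    # items sharing this item's gold class
        overlap = len(cluster & klass)
        precisions.append(overlap / len(cluster))  # per-item BCubed precision
        recalls.append(overlap / len(klass))       # per-item BCubed recall

    p = sum(precisions) / len(precisions)
    r = sum(recalls) / len(recalls)
    return 2 * p * r / (p + r)  # harmonic mean of averaged P and R

# Toy example: two induced clusters over four word occurrences.
pred = {"bank_1": 0, "bank_2": 0, "coast_1": 1, "shore_1": 1}
gold = {"bank_1": "A", "bank_2": "A", "coast_1": "B", "shore_1": "B"}
print(bcubed_f1(pred, gold))  # 1.0: the toy clustering matches gold exactly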