@inproceedings{plank-2021-back,
    title = "From back to the roots into the gated woods: Deep learning for {NLP}",
    author = "Plank, Barbara",
    editor = "Jurgens, David  and
      Kolhatkar, Varada  and
      Li, Lucy  and
      Mieskes, Margot  and
      Pedersen, Ted",
    booktitle = "Proceedings of the Fifth Workshop on Teaching NLP",
    month = jun,
    year = "2021",
    address = "Online",
    publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2021.teachingnlp-1.9",
    doi = "10.18653/v1/2021.teachingnlp-1.9",
    pages = "59--61",
abstract = "Deep neural networks have revolutionized many fields, including Natural Language Processing. This paper outlines teaching materials for an introductory lecture on deep learning in Natural Language Processing (NLP). The main submitted material covers a summer school lecture on encoder-decoder models. Complementary to this is a set of jupyter notebook slides from earlier teaching, on which parts of the lecture were based on. The main goal of this teaching material is to provide an overview of neural network approaches to natural language processing, while linking modern concepts back to the roots showing traditional essential counterparts. The lecture departs from count-based statistical methods and spans up to gated recurrent networks and attention, which is ubiquitous in today{'}s NLP.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
  <mods ID="plank-2021-back">
    <titleInfo>
      <title>From back to the roots into the gated woods: Deep learning for NLP</title>
    </titleInfo>
    <name type="personal">
      <namePart type="given">Barbara</namePart>
      <namePart type="family">Plank</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <originInfo>
      <dateIssued>2021-06</dateIssued>
    </originInfo>
    <typeOfResource>text</typeOfResource>
    <relatedItem type="host">
      <titleInfo>
        <title>Proceedings of the Fifth Workshop on Teaching NLP</title>
      </titleInfo>
      <name type="personal">
        <namePart type="given">David</namePart>
        <namePart type="family">Jurgens</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Varada</namePart>
        <namePart type="family">Kolhatkar</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Lucy</namePart>
        <namePart type="family">Li</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Margot</namePart>
        <namePart type="family">Mieskes</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Ted</namePart>
        <namePart type="family">Pedersen</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <originInfo>
        <publisher>Association for Computational Linguistics</publisher>
        <place>
          <placeTerm type="text">Online</placeTerm>
        </place>
      </originInfo>
      <genre authority="marcgt">conference publication</genre>
    </relatedItem>
    <abstract>Deep neural networks have revolutionized many fields, including Natural Language Processing. This paper outlines teaching materials for an introductory lecture on deep learning in Natural Language Processing (NLP). The main submitted material covers a summer school lecture on encoder-decoder models. Complementary to this is a set of Jupyter notebook slides from earlier teaching, on which parts of the lecture were based. The main goal of this teaching material is to provide an overview of neural network approaches to natural language processing, while linking modern concepts back to their roots by showing their essential traditional counterparts. The lecture starts from count-based statistical methods and builds up to gated recurrent networks and attention, which is ubiquitous in today’s NLP.</abstract>
<identifier type="citekey">plank-2021-back</identifier>
<identifier type="doi">10.18653/v1/2021.teachingnlp-1.9</identifier>
<location>
<url>https://aclanthology.org/2021.teachingnlp-1.9</url>
</location>
<part>
<date>2021-06</date>
<extent unit="page">
<start>59</start>
<end>61</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T From back to the roots into the gated woods: Deep learning for NLP
%A Plank, Barbara
%Y Jurgens, David
%Y Kolhatkar, Varada
%Y Li, Lucy
%Y Mieskes, Margot
%Y Pedersen, Ted
%S Proceedings of the Fifth Workshop on Teaching NLP
%D 2021
%8 June
%I Association for Computational Linguistics
%C Online
%F plank-2021-back
%X Deep neural networks have revolutionized many fields, including Natural Language Processing. This paper outlines teaching materials for an introductory lecture on deep learning in Natural Language Processing (NLP). The main submitted material covers a summer school lecture on encoder-decoder models. Complementary to this is a set of Jupyter notebook slides from earlier teaching, on which parts of the lecture were based. The main goal of this teaching material is to provide an overview of neural network approaches to natural language processing, while linking modern concepts back to their roots by showing their essential traditional counterparts. The lecture starts from count-based statistical methods and builds up to gated recurrent networks and attention, which is ubiquitous in today’s NLP.
%R 10.18653/v1/2021.teachingnlp-1.9
%U https://aclanthology.org/2021.teachingnlp-1.9
%U https://doi.org/10.18653/v1/2021.teachingnlp-1.9
%P 59-61
Markdown (Informal)
[From back to the roots into the gated woods: Deep learning for NLP](https://aclanthology.org/2021.teachingnlp-1.9) (Plank, TeachingNLP 2021)
ACL
Barbara Plank. 2021. From back to the roots into the gated woods: Deep learning for NLP. In Proceedings of the Fifth Workshop on Teaching NLP, pages 59–61, Online. Association for Computational Linguistics.