%
% GENERATED FROM https://www.coli.uni-saarland.de
% by : anonymous
% IP : coli2006.lst.uni-saarland.de
% at : Mon, 05 Feb 2024 15:43:30 +0100 GMT
%
% Selection : Author: Victor_Zue
%

@Book{Ronald_A._et_al:1997,
  TITLE = {Survey of the State of the Art in Human Language Technology},
  YEAR = {1997},
  EDITOR = {Cole, Ronald A. and Mariani, Joseph and Uszkoreit, Hans and Varile, Giovanni and Zaenen, Annie and Zue, Victor and Zampolli, Antonio},
  ADDRESS = {Cambridge},
  PUBLISHER = {Cambridge University Press and Giardini},
  NOTE = {Also available as Web edition},
  URL = {http://www.dfki.de/~hansu/HLT-Survey.pdf},
  ANNOTE = {COLIURL : Cole:1997:SSA.pdf}
}

@InCollection{van_Noord_Neumann:1997,
  AUTHOR = {van Noord, Gertjan and Neumann, Günter},
  TITLE = {Syntactic Generation},
  YEAR = {1997},
  BOOKTITLE = {Survey of the State of the Art in Human Language Technology},
  EDITOR = {Cole, Ronald A. and Mariani, Joseph and Uszkoreit, Hans and Zaenen, Annie and Zue, Victor},
  ADDRESS = {Cambridge},
  PUBLISHER = {Cambridge University Press},
  URL = {http://www.dfki.de/~neumann/publications/new-ps/nlg-survey.ps.gz http://grid.let.rug.nl/~vannoord/papers/survey.ps},
  ABSTRACT = {In a natural language generation module, we often distinguish two components. First, it must be decided what should be said. This task is delegated to a planning component. Such a component might produce an expression representing the content of the proposed utterance. On the basis of this representation, the syntactic generation component produces the actual output sentence(s). Although the distinction between planning and syntactic generation is not uncontroversial, we will nonetheless assume such an architecture here, in order to explain some of the issues that arise in syntactic generation. A (natural language) grammar is a formal device that defines a relation between (natural language) utterances and their corresponding meanings. In practice this usually means that a grammar defines a relation between strings and logical forms. During natural language understanding, the task is to arrive at a logical form that corresponds to the input string. Syntactic generation can be described as the problem of finding the corresponding string for an input logical form. We thus make a distinction between the grammar, which defines this relation, and the procedure that computes the relation on the basis of such a grammar. In the current state of the art, unification-based (or, more generally, constraint-based) formalisms are used to express such grammars, e.g., Lexical Functional Grammar (LFG) [Bre82], Head-Driven Phrase Structure Grammar (HPSG) [PS87], and constraint-based categorial frameworks (cf. [Usz86] and [ZKC87]). Almost all modern linguistic theories assume that a natural language grammar not only describes the correct sentences of a language, but also describes the corresponding semantic structures of the grammatical sentences. Given that a grammar specifies the relation between phonology and semantics, it seems obvious that the generator is supposed to use this specification. For example, Generalized Phrase Structure Grammar (GPSG) [GKPS85] provides a detailed description of the semantic interpretation of the sentences licensed by the grammar. Thus one might assume that a generator based on GPSG constructs a sentence for a given semantic structure, according to the semantic interpretation rules of GPSG.
Alternatively, [Bus90] presents a generator, based on GPSG, which does not take as its input a logical form, but rather some kind of control expression which merely instructs the grammatical component which rules of the grammar to apply. Similarly, in the conception of [GP90], a generator is provided with some kind of deep structure which can be interpreted as a control expression instructing the grammar which rules to apply. These approaches clearly solve some of the problems encountered in generation, simply by pushing the problem into the conceptual component (i.e., the planning component). In this overview we restrict our attention to the more ambitious approach sketched above. The success of the currently developed constraint-based theories is due to the fact that they are purely declarative. Hence, it is an interesting objective, both theoretically and practically, to use one and the same grammar for natural language understanding and generation. In fact, the potential for reversibility was a primary motivation for the introduction of Martin Kay's Functional Unification Grammar (FUG). In recent years, interest in such a reversible architecture has led to a number of publications.},
  ANNOTE = {COLIURL : Noord:1987:SG.pdf Noord:1987:SG.ps}
}
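%
% Editor's note: the abstract above describes a grammar as a single declarative
% relation between strings and logical forms, with understanding and generation
% as the two directions of computing that relation ("reversibility"). The sketch
% below is a minimal, hypothetical Python illustration of that idea only; the
% names (LEXICON, RULES, parse, generate) are illustrative and not taken from the
% cited work, and the toy rules stand in for the unification-based machinery of
% LFG/HPSG/FUG.
%

# Toy "reversible grammar": one declarative rule set is consulted by both
# parse() (string -> logical form) and generate() (logical form -> string).

# Lexicon: category -> {word: semantic constant}
LEXICON = {
    "np": {"john": "john", "mary": "mary"},
    "iv": {"sleeps": "sleep"},
    "tv": {"sees": "see", "loves": "love"},
}

# Each rule pairs a category sequence with the order in which the constituents'
# semantics appear in the logical form (predicate first, then its arguments).
RULES = [
    (("np", "iv"), (1, 0)),           # "john sleeps"    <-> ("sleep", "john")
    (("np", "tv", "np"), (1, 0, 2)),  # "john sees mary" <-> ("see", "john", "mary")
]

def parse(words):
    """Understanding direction: map a word sequence to a logical form."""
    for cats, order in RULES:
        if len(words) == len(cats) and all(w in LEXICON[c] for w, c in zip(words, cats)):
            sems = [LEXICON[c][w] for w, c in zip(words, cats)]
            return tuple(sems[i] for i in order)
    raise ValueError(f"no parse for {words}")

def generate(lf):
    """Generation direction: map a logical form to a word sequence,
    consulting exactly the same LEXICON and RULES as parse()."""
    inverse = {cat: {sem: w for w, sem in entries.items()}
               for cat, entries in LEXICON.items()}
    for cats, order in RULES:
        if len(lf) != len(cats):
            continue
        sems = [None] * len(cats)
        for lf_pos, seq_pos in enumerate(order):
            sems[seq_pos] = lf[lf_pos]
        if all(s in inverse[c] for c, s in zip(cats, sems)):
            return [inverse[c][s] for c, s in zip(cats, sems)]
    raise ValueError(f"no realization for {lf}")

if __name__ == "__main__":
    assert parse(["john", "sleeps"]) == ("sleep", "john")
    assert generate(("see", "john", "mary")) == ["john", "sees", "mary"]
    # Round trip: generating and then re-parsing recovers the same logical form.
    assert parse(generate(("love", "mary", "john"))) == ("love", "mary", "john")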