% GENERATED FROM https://www.coli.uni-saarland.de
% by : anonymous
% IP : coli2006.lst.uni-saarland.de
% at : Mon, 05 Feb 2024 15:43:10 +0100 GMT
%
% Selection : Author: Ellen_Douglas-Cowie
%
% NOTE(review): the original export contained the Cowie_et_al:2000 and
% Douglas-Cowie_et_al:2000 entries twice each, byte-identical, under the
% same citation keys; BibTeX/Biber reject repeated keys, so the exact
% duplicates were removed. Citation keys are otherwise kept unchanged so
% existing \cite commands still resolve. Note that the key Schröder:2000
% and several field values contain non-ASCII UTF-8 characters -- fine for
% biblatex/Biber, but classic 8-bit BibTeX may mis-sort or reject them;
% TODO confirm the toolchain before converting to {\"o} escapes.

% Shared proceedings title, so all entries cite the venue identically
% (the original mixed a short and a long form of the same workshop name).
@string{iscaemotion2000 = {Proceedings of the {ISCA} Workshop on Speech and Emotion: A Conceptual Framework for Research}}

@inproceedings{Cowie_et_al:2000,
  author    = {Cowie, Roddy and Douglas-Cowie, Ellen and Savvidou, Suzie and McMahon, Edelle and Sawey, Martin and Schröder, Marc},
  title     = {'{FEELTRACE}': An Instrument for Recording Perceived Emotion in Real Time},
  year      = {2000},
  booktitle = iscaemotion2000,
  pages     = {19--24},
  editor    = {Douglas-Cowie, Ellen and Cowie, Roddy and Schröder, Marc},
  address   = {Belfast},
  publisher = {Textflow},
  url       = {http://www.dfki.de/~schroed/articles/cowieetal2000.pdf},
  abstract  = {FEELTRACE is an instrument developed to let observers track the emotional content of a stimulus as they perceive it over time, allowing the emotional dynamics of speech episodes to be examined. It is based on activation-evaluation space, a representation derived from psychology. The activation dimension measures how dynamic the emotional state is; the evaluation dimension is a global measure of the positive or negative feeling associated with the state. Research suggests that the space is naturally circular, i.e. states which are at the limit of emotional intensity define a circle, with alert neutrality at the centre. To turn those ideas into a recording tool, the space was represented by a circle on a computer screen, and observers described perceived emotional state by moving a pointer (in the form of a disc) to the appropriate point in the circle, using a mouse. Prototypes were tested, and in the light of results, refinements were made to ensure that outputs were as consistent and meaningful as possible. They include colour coding the pointer in a way that users readily associate with the relevant emotional state; presenting key emotion words as ‘landmarks’ at the strategic points in the space; and developing an induction procedure to introduce observers to the system. An experiment assessed the reliability of the developed system. Stimuli were 16 clips from TV programs, two showing relatively strong emotions in each quadrant of activation-evaluation space, each paired with one of the same person in a relatively neural state. 24 raters took part. Differences between clips chosen to contrast were statistically robust. Results were plotted in activation-evaluation space as ellipses, each with its centre at the mean co-ordinates for the clip, and its width proportional to standard deviation across raters. The size of the ellipses meant that about 25 could be fitted into the space, i.e. FEELTRACE has resolving power comparable to an emotion vocabulary of 20 non-overlapping words, with the advantage of allowing intermediate ratings, and above all, the ability to track impressions continuously.},
  annote    = {COLIURL : Cowie:2000:FIR.pdf},
}

@proceedings{Roddy_et_al:2000,
  title     = iscaemotion2000,
  year      = {2000},
  editor    = {Cowie, Roddy and Douglas-Cowie, Ellen and Schröder, Marc},
  address   = {Belfast},
  publisher = {Textflow},
  url       = {http://www.qub.ac.uk/en/isca/proceedings},
}

@inproceedings{Douglas-Cowie_et_al:2000,
  author    = {Douglas-Cowie, Ellen and Cowie, Roddy and Schröder, Marc},
  title     = {A New Emotion Database: Considerations, Sources and Scope},
  year      = {2000},
  booktitle = iscaemotion2000,
  pages     = {39--44},
  editor    = {Douglas-Cowie, Ellen and Cowie, Roddy and Schröder, Marc},
  address   = {Belfast},
  publisher = {Textflow},
  url       = {http://www.dfki.de/~schroed/articles/douglascowieetal2000.pdf},
  abstract  = {Research on the expression of emotion is underpinned by databases. Reviewing available resources persuaded us of the need to develop one that prioritised ecological validity. The basic unit of the database is a clip, which is an audiovisual recording of an episode that appears to be reasonably self-contained. Clips range from 10 -- 60 secs, and are captured as MPEG files. They were drawn from two main sources. People were recorded discussing emotive subjects either with each other, or with one of the research team. We also recorded extracts from television programs where members of the public interact in a way that at least appears essentially spontaneous. Associated with each clip are two additional types of file. An audio file (.wav format) contains speech alone, edited to remove sounds other than the main speaker. An interpretation file describes the emotional state that observers attribute to the main speaker, using the FEELTRACE system to provide a continuous record of the perceived ebb and flow of emotion. Clips have been extracted for 100 speakers, with at least two for each speaker (one relatively neutral and others showing marked emotions of different kinds).},
  annote    = {COLIURL : Douglas-Cowie:2000:NED.pdf},
}

@inproceedings{Schröder:2000,
  author    = {Schröder, Marc},
  title     = {Experimental Study of Affect Bursts},
  year      = {2000},
  booktitle = iscaemotion2000,
  pages     = {132--137},
  editor    = {Cowie, Roddy and Douglas-Cowie, Ellen and Schröder, Marc},
  address   = {Belfast},
  publisher = {Textflow},
  url       = {http://www.dfki.de/~schroed/articles/schroeder2000.pdf},
  abstract  = {The study described here investigates the perceived emotional content of “affect bursts” for German. Affect bursts are defined as short emotional non-speech expressions interrupting speech. This study shows that affect bursts, presented without context, can convey a clearly identifiable emotional meaning. Affect bursts expressing ten emotions were produced by actors. After a pre-selection procedure, “good examples” for each emotion were presented in a perception test. The mean recognition score of 81% indicates that affect bursts seem to be an effective means of expressing emotions. Affect bursts are grouped into classes on the basis of phonetic similarity. Recognition and confusion patterns are examined for these classes.},
  annote    = {COLIURL : Schroder:2000:ESA.pdf},
}

@inproceedings{Schröder_et_al:2001,
  author    = {Schröder, Marc and Cowie, Roddy and Douglas-Cowie, Ellen and Westerdijk, Machiel and Gielen, Stan},
  title     = {Acoustic Correlates of Emotion Dimensions in View of Speech Synthesis},
  year      = {2001},
  booktitle = {Proceedings of the 7th European Conference on Speech Communication and Technology ({EUROSPEECH'01})},
  volume    = {1},
  pages     = {87--90},
  editor    = {Dalsgaard, Paul and Lindberg, Borge and Benner, Henrik},
  address   = {Aalborg},
  publisher = {Kommunik Grafiske Losninger A/S},
  url       = {http://www.dfki.de/~schroed/articles/schroeder_etal2001.pdf},
  abstract  = {In a database of emotional speech, dimensional descriptions of emotional states have been correlated with acoustic variables. Many stable correlations have been found. The predictions made by linear regression widely agree with the literature. The numerical form of the description and the choice of acoustic variables studied are particularly well suited for future implementation in a speech synthesis system, possibly allowing for the expression of gradual emotional states.},
  annote    = {COLIURL : Schroder:2001:ACE.pdf},
}