@article{3316,
  keywords = {Benchmark, UEQ, User Experience, UX},
  author   = {Anna-Lena Meiners and Martin Schrepp and Andreas Hinderks and Jörg Thomaschewski},
  title    = {A Benchmark for the UEQ+ Framework: Construction of a Simple Tool to Quickly Interpret UEQ+ KPIs},
  abstract = {Questionnaires are a highly efficient method to compare the user experience (UX) of different interactive products or versions of a single product. Concretely, they allow us to evaluate the UX easily and to compare different products with a numeric UX score. However, often only one UX score from a single evaluated product is available. Without a comparison to other measurements, it is difficult to interpret an individual score, e.g., to decide whether a product’s UX is good enough to compete in the market. Many questionnaires offer benchmarks to support researchers in these cases. A benchmark is the result of a larger set of product evaluations performed with the same questionnaire. The score obtained from a single product evaluation can be compared to the scores from this benchmark data set to quickly interpret the results. In this paper, the first benchmark for the UEQ+ (User Experience Questionnaire +) is presented, which was created using 3,290 UEQ+ responses for 26 successful software products. The UEQ+ is a modular framework that contains a high number of validated user experience scales that can be combined to form a UX questionnaire. Currently, no benchmark is available for this framework, making the benchmark constructed in this paper a valuable interpretation tool for UEQ+ questionnaires.},
  year     = {2023},
  journal  = {International Journal of Interactive Multimedia and Artificial Intelligence},
  volume   = {In Press},
  chapter  = {1},
  number   = {In Press},
  pages    = {1-8},
  month    = {05/2023},
  issn     = {1989-1660},
  url      = {https://www.ijimai.org/journal/sites/default/files/2023-05/ip2023_05_003.pdf},
  doi      = {10.9781/ijimai.2023.05.003},
}