@inproceedings{bennettInterdependenceFrameAssistive2018,
title = {Interdependence as a {{Frame}} for {{Assistive Technology Research}} and {{Design}}},
booktitle = {Proceedings of the 20th {{International ACM SIGACCESS Conference}} on {{Computers}} and {{Accessibility}}},
author = {Bennett, Cynthia L. and Brady, Erin and Branham, Stacy M. and Yam, Yu Jun},
year = {2018},
month = oct,
pages = {161--173},
publisher = {Association for Computing Machinery},
address = {Galway, Ireland},
doi = {10.1145/3234695.3236348},
urldate = {2024-06-22},
isbn = {978-1-4503-5650-3},
langid = {english},
}
@inproceedings{caineLocalStandardsSample2016,
title = {Local {{Standards}} for {{Sample Size}} at {{CHI}}},
booktitle = {Proceedings of the 2016 {{CHI Conference}} on {{Human Factors}} in {{Computing Systems}}},
author = {Caine, Kelly},
year = {2016},
month = may,
pages = {981--992},
publisher = {Association for Computing Machinery},
address = {San Jose, California, USA},
doi = {10.1145/2858036.2858498},
urldate = {2024-04-17},
isbn = {978-1-4503-3362-7},
langid = {english},
}
@article{faulknerFiveuserAssumptionBenefits2003,
title = {Beyond the Five-User Assumption: {{Benefits}} of Increased Sample Sizes in Usability Testing},
shorttitle = {Beyond the Five-User Assumption},
author = {Faulkner, Laura},
year = {2003},
month = aug,
journal = {Behavior Research Methods, Instruments, \& Computers},
volume = {35},
number = {3},
pages = {379--383},
issn = {1532-5970},
doi = {10.3758/BF03195514},
urldate = {2024-04-17},
abstract = {It is widely assumed that 5 participants suffice for usability testing. In this study, 60 users were tested and random sets of 5 or more were sampled from the whole, to demonstrate the risks of using only 5 participants and the benefits of using more. Some of the randomly selected sets of 5 participants found 99\% of the problems; other sets found only 55\%. With 10 users, the lowest percentage of problems revealed by any one set was increased to 80\%, and with 20 users, to 95\%.},
langid = {english},
keywords = {Tick Mark,Usability Problem,Usability Professional,Usability Test,User Deviation},
}
@inproceedings{geStereoMathAccessibleMusical2024,
title = {{{StereoMath}}: {{An Accessible}} and {{Musical Equation Editor}}},
shorttitle = {{{StereoMath}}},
booktitle = {Proceedings of the 26th {{International ACM SIGACCESS Conference}} on {{Computers}} and {{Accessibility}}},
author = {Ge, Kenneth and Seo, JooYoung},
year = {2024},
month = oct,
series = {{{ASSETS}} '24},
pages = {1--5},
publisher = {Association for Computing Machinery},
address = {New York, NY, USA},
doi = {10.1145/3663548.3688487},
urldate = {2025-02-28},
abstract = {For blind and low-vision (BLV) individuals, digital math communication is uniquely difficult due to the lack of accessible tools. Currently, the state of the art is either code-based, like LaTeX, or WYSIWYG, like visual editors. However, both paradigms view math communication as primarily a visual typesetting problem, and may be accessible but difficult to use. In this paper, we present an equation editor that is built from the ground up with BLV accessibility in mind. Specifically, we notice that two of the biggest barriers with current technology are the high cognitive load and the lack of spatial relationships. Thus, we build an editor that uses spatial audio cues, muscle memory, tones, and more intuitive navigation to properly contextualize math equations. We discuss how this new paradigm can enable new levels of math communication, engagement, and literacy. Finally, we discuss natural next steps.},
isbn = {979-8-4007-0677-6}
}
@inproceedings{kamathPlayingBarriersCrafting2024,
title = {Playing {{Without Barriers}}: {{Crafting Playful}} and {{Accessible VR Table-Tennis}} with and for {{Blind}} and {{Low-Vision Individuals}}},
shorttitle = {Playing {{Without Barriers}}},
booktitle = {Proceedings of the 26th {{International ACM SIGACCESS Conference}} on {{Computers}} and {{Accessibility}}},
author = {Kamath, Sanchita S. and Zeidieh, Aziz and Khan, Omar and Sethi, Dhruv and Seo, JooYoung},
year = {2024},
month = oct,
series = {{{ASSETS}} '24},
pages = {1--5},
publisher = {Association for Computing Machinery},
address = {New York, NY, USA},
doi = {10.1145/3663548.3688526},
urldate = {2025-02-28},
abstract = {Virtual reality (VR) has been celebrated for its immersive experiences, yet its potential for creating accessible and enjoyable environments for Blind and Low-Vision (BLV) individuals remains underexplored. Our project addresses this gap by developing a VR table tennis game specifically designed for BLV players. Utilizing an autoethnographic approach, our mixed-ability team, including three BLV co-designers, prototyped the game through rapid iterative testing and evaluation over four months. We integrated multi-sensory feedback mechanisms, such as spatial audio, haptic feedback, and high-contrast visuals, to enhance navigation and interaction. Our findings highlight the effectiveness of combining these modalities to create an enjoyable and realistic VR sports experience. However, we also identified challenges, such as the need for balanced sensory feedback to avoid overload. This study emphasizes the importance of inclusive design in VR gaming, offering new recreational opportunities for BLV individuals and setting the stage for future advancements in accessible VR technology.},
isbn = {979-8-4007-0677-6}
}
@article{nowellThematicAnalysisStriving2017,
title = {Thematic {{Analysis}}: {{Striving}} to {{Meet}} the {{Trustworthiness Criteria}}},
shorttitle = {Thematic {{Analysis}}},
author = {Nowell, Lorelli S. and Norris, Jill M. and White, Deborah E. and Moules, Nancy J.},
year = {2017},
month = dec,
journal = {International Journal of Qualitative Methods},
volume = {16},
number = {1},
pages = {1609406917733847},
publisher = {SAGE Publications Inc},
issn = {1609-4069},
doi = {10.1177/1609406917733847},
urldate = {2025-02-28},
abstract = {As qualitative research becomes increasingly recognized and valued, it is imperative that it is conducted in a rigorous and methodical manner to yield meaningful and useful results. To be accepted as trustworthy, qualitative researchers must demonstrate that data analysis has been conducted in a precise, consistent, and exhaustive manner through recording, systematizing, and disclosing the methods of analysis with enough detail to enable the reader to determine whether the process is credible. Although there are numerous examples of how to conduct qualitative research, few sophisticated tools are available to researchers for conducting a rigorous and relevant thematic analysis. The purpose of this article is to guide researchers using thematic analysis as a research method. We offer personal insights and practical examples, while exploring issues of rigor and trustworthiness. The process of conducting a thematic analysis is illustrated through the presentation of an auditable decision trail, guiding interpreting and representing textual data. We detail our step-by-step approach to exploring the effectiveness of strategic clinical networks in Alberta, Canada, in our mixed methods case study. This article contributes a purposeful approach to thematic analysis in order to systematize and increase the traceability and verification of the analysis.},
langid = {english},
}
@misc{pillaiAccessibleUsabilityScale,
title = {Accessible {{Usability Scale}} ({{AUS}})},
author = {Pillai, Alwar},
journal = {Fable},
urldate = {2024-04-17},
abstract = {The Accessible Usability Scale (AUS) is a free tool to measure the usability of a digital product for assistive technology users.},
howpublished = {https://makeitfable.com/accessible-usability-scale/},
langid = {american},
}
@inproceedings{seoCodingNonVisuallyVisual2023,
title = {Coding {{Non-Visually}} in {{Visual Studio Code}}: {{Collaboration Towards Accessible Development Environment}} for {{Blind Programmers}}},
shorttitle = {Coding {{Non-Visually}} in {{Visual Studio Code}}},
booktitle = {Proceedings of the 25th {{International ACM SIGACCESS Conference}} on {{Computers}} and {{Accessibility}}},
author = {Seo, JooYoung and Rogge, Megan},
year = {2023},
month = oct,
series = {{{ASSETS}} '23},
pages = {1--9},
publisher = {Association for Computing Machinery},
address = {New York, NY, USA},
doi = {10.1145/3597638.3614550},
urldate = {2024-06-21},
abstract = {This paper delineates a fruitful collaboration between blind and sighted developers, aiming to augment the accessibility of Visual Studio Code (VSCode). Our shared journey is portrayed through examples drawn from our interaction with GitHub issues, pull requests, review processes, and insider's releases, each contributing to an improved VSCode experience for blind developers. One key milestone of our co-design process is the establishment of an accessible terminal buffer, a significant enhancement for blind developers using VSCode. Other innovative outcomes include Git Diff audio cues, adaptable verbosity settings, intuitive help menus, and a targeted accessibility testing initiative. These tailored improvements not only uplift the accessibility standards of VSCode but also provide a valuable blueprint for open-source developers at large. Through our shared dedication to promoting inclusivity in software development, we aim for the strategies and successes shared in this paper to inspire and guide the open-source community towards crafting more accessible software environments.},
isbn = {979-8-4007-0220-4},
keywords = {accessibility,integrated development environment,nonvisual programming,visual studio code}
}
@article{seoDesigningBornAccessibleCourses2024,
title = {Designing {{Born-Accessible Courses}} in {{Data Science}} and {{Visualization}}: {{Challenges}} and {{Opportunities}} of a {{Remote Curriculum Taught}} by {{Blind Instructors}} to {{Blind Students}}},
shorttitle = {Designing {{Born-Accessible Courses}} in {{Data Science}} and {{Visualization}}},
author = {Seo, JooYoung and O'Modhrain, Sile and Xia, Yilin and Kamath, Sanchita S. and Lee, Bongshin and Coughlan, James},
year = {2024},
journal = {EuroVis 2024 - Education Papers},
edition = {1053},
publisher = {The Eurographics Association},
doi = {10.2312/EVED.20241053},
urldate = {2024-06-21},
abstract = {While recent years have seen a growing interest in accessible visualization tools and techniques for blind people, little attention is paid to the learning opportunities and teaching strategies of data science and visualization tailored for blind individuals. Whereas the former focuses on the accessibility and usability issues of data visualization tools, the latter is concerned with the learnability of concepts and skills for data science and visualization. In this paper, we present novel approaches to teaching data science and visualization to blind students in an online setting. Taught by blind instructors, nine blind learners having a wide range of professional backgrounds participated in a two-week summer course. We describe the course design, teaching strategies, and learning outcomes. We also discuss the challenges and opportunities of teaching data science and visualization to blind students. Our work contributes to the growing body of knowledge on accessible data science and visualization education, and provides insights into the design of online courses for blind students.},
copyright = {Creative Commons Attribution 4.0 International},
isbn = {9783038682578},
langid = {english},
keywords = {Applied computing Education,CCS Concepts: Applied computing Education},
}
@inproceedings{seoMAIDRMakingStatistical2024,
title = {{{MAIDR}}: {{Making Statistical Visualizations Accessible}} with {{Multimodal Data Representation}}},
shorttitle = {{{MAIDR}}},
booktitle = {Proceedings of the {{CHI Conference}} on {{Human Factors}} in {{Computing Systems}}},
author = {Seo, JooYoung and Xia, Yilin and Lee, Bongshin and McCurry, Sean and Yam, Yu Jun},
year = {2024},
month = may,
series = {{{CHI}} '24},
pages = {1--22},
publisher = {Association for Computing Machinery},
address = {New York, NY, USA},
doi = {10.1145/3613904.3642730},
urldate = {2024-06-21},
abstract = {This paper investigates new data exploration experiences that enable blind users to interact with statistical data visualizations---bar plots, heat maps, box plots, and scatter plots---leveraging multimodal data representations. In addition to sonification and textual descriptions that are commonly employed by existing accessible visualizations, our MAIDR (multimodal access and interactive data representation) system incorporates two additional modalities (braille and review) that offer complementary benefits. It also provides blind users with the autonomy and control to interactively access and understand data visualizations. In a user study involving 11 blind participants, we found the MAIDR system facilitated the accurate interpretation of statistical visualizations. Participants exhibited a range of strategies in combining multiple modalities, influenced by their past interactions and experiences with data visualizations. This work accentuates the overlooked potential of combining refreshable tactile representation with other modalities and elevates the discussion on the importance of user autonomy when designing accessible data visualizations.},
isbn = {979-8-4007-0330-0},
keywords = {Accessibility,Blind,Braille Display,Multimodality,Screen Readers,Statistical Visualization},
}
@inproceedings{seoMAIDRMeetsAI2024,
title = {{{MAIDR Meets AI}}: {{Exploring Multimodal LLM-Based Data Visualization Interpretation}} by and with {{Blind}} and {{Low-Vision Users}}},
shorttitle = {{{MAIDR Meets AI}}},
booktitle = {Proceedings of the 26th {{International ACM SIGACCESS Conference}} on {{Computers}} and {{Accessibility}}},
author = {Seo, JooYoung and Kamath, Sanchita S. and Zeidieh, Aziz and Venkatesh, Saairam and McCurry, Sean},
year = {2024},
month = oct,
series = {{{ASSETS}} '24},
pages = {1--31},
publisher = {Association for Computing Machinery},
address = {New York, NY, USA},
doi = {10.1145/3663548.3675660},
urldate = {2025-02-28},
abstract = {This paper investigates how blind and low-vision (BLV) users interact with multimodal large language models (LLMs) to interpret data visualizations. Building upon our previous work on the multimodal access and interactive data representation (MAIDR) framework, our mixed-visual-ability team co-designed maidrAI, an LLM extension providing multiple AI responses to users' visual queries. To explore generative AI-based data representation, we conducted user studies with 8 BLV participants, tasking them with interpreting box plots using our system. We examined how participants personalize LLMs through prompt engineering, their preferences for data visualization descriptions, and strategies for verifying LLM responses. Our findings highlight three dimensions affecting BLV users' decision-making process: modal preference, LLM customization, and multimodal data representation. This research contributes to designing more accessible data visualization tools for BLV users and advances the understanding of inclusive generative AI applications.},
isbn = {979-8-4007-0677-6},
}
@inproceedings{sharifShouldSayDisabled2022,
title = {Should {{I Say}} ``{{Disabled People}}'' or ``{{People}} with {{Disabilities}}''? {{Language Preferences}} of {{Disabled People Between Identity-}} and {{Person-First Language}}},
shorttitle = {Should {{I Say}} ``{{Disabled People}}'' or ``{{People}} with {{Disabilities}}''?},
booktitle = {Proceedings of the 24th {{International ACM SIGACCESS Conference}} on {{Computers}} and {{Accessibility}}},
author = {Sharif, Ather and McCall, Aedan Liam and Bolante, Kianna Roces},
year = {2022},
month = oct,
series = {{{ASSETS}} '22},
pages = {1--18},
publisher = {Association for Computing Machinery},
address = {New York, NY, USA},
doi = {10.1145/3517428.3544813},
urldate = {2024-06-23},
abstract = {The usage of identity- (e.g., ``disabled people'') versus person-first language (e.g., ``people with disabilities'') to refer to disabled people has been an active and ongoing discussion. However, it remains unclear which semantic language should be used, especially for different disability categories within the overall demographics of disabled people. To gather and examine the language preferences of disabled people, we surveyed 519 disabled people from 23 countries. Our results show that 49\% of disabled people preferred identity-first language whereas 33\% preferred person-first language and 18\% had no preference. Additionally, we explore the intra-sectionality and intersectionality of disability categories, gender identifications, age groups, and countries on language preferences, finding that language preferences vary within and across each of these factors. Our qualitative assessment of the survey responses shows that disabled people may have multiple or no preferences. To make our survey data publicly available, we created an interactive and accessible live web platform, enabling users to perform intersectional exploration of language preferences. In a secondary investigation, using part-of-speech (POS) tagging, we analyzed the abstracts of 11,536 publications at ACM ASSETS (N=1,564) and ACM CHI (N=9,972), assessing their adoption of identity- and person-first language. We present the results from our analysis and offer recommendations for authors and researchers in choosing the appropriate language to refer to disabled people.},
isbn = {978-1-4503-9258-7},
keywords = {disability,identity-first,language,person-first,preferences,survey},
}
@inproceedings{spielNothingUsUs2020,
title = {Nothing {{About Us Without Us}}: {{Investigating}} the {{Role}} of {{Critical Disability Studies}} in {{HCI}}},
shorttitle = {Nothing {{About Us Without Us}}},
booktitle = {Extended {{Abstracts}} of the 2020 {{CHI Conference}} on {{Human Factors}} in {{Computing Systems}}},
author = {Spiel, Katta and Gerling, Kathrin and Bennett, Cynthia L. and Brul{\'e}, Emeline and Williams, Rua M. and Rode, Jennifer and Mankoff, Jennifer},
year = {2020},
month = apr,
pages = {1--8},
publisher = {Association for Computing Machinery},
address = {Honolulu, HI, USA},
doi = {10.1145/3334480.3375150},
urldate = {2024-06-23},
isbn = {978-1-4503-6819-3},
langid = {english}
}
@misc{viraniBuildingAccessibleUsability2020,
title = {Building the {{Accessible Usability Scale}} - {{A Walkthrough}}},
author = {Virani, Abid},
year = {2020},
month = dec,
journal = {Fable},
urldate = {2024-04-17},
abstract = {The Accessible Usability Scale is built as a calculator on the Fable website, and available for you to use for free.},
howpublished = {https://makeitfable.com/article/building-the-accessible-usability-scale/},
langid = {american},
}