@article {zevin2023gravity, title = {Gravity Spy: Lessons Learned and a Path Forward}, journal = {European Physical Journal Plus}, volume = {139}, year = {2024}, month = jan, pages = {Article 100}, abstract = {

The Gravity Spy project aims to uncover the origins of glitches, transient bursts of noise that hamper analysis of gravitational-wave data. By using both the work of citizen-science volunteers and machine-learning algorithms, the Gravity Spy project enables reliable classification of glitches. Citizen science and machine learning are intrinsically coupled within the Gravity Spy framework, with machine-learning classifications providing a rapid first-pass classification of the dataset and enabling tiered volunteer training, and volunteer-based classifications verifying the machine classifications, bolstering the machine-learning training set and identifying new morphological classes of glitches. These classifications are now routinely used in studies characterizing the performance of the LIGO gravitational-wave detectors. Providing the volunteers with a training framework that teaches them to classify a wide range of glitches, as well as additional tools to aid their investigations of interesting glitches, empowers them to make discoveries of new classes of glitches. This demonstrates that, when given suitable support, volunteers can go beyond simple classification tasks to identify new features in data at a level comparable to domain experts. The Gravity Spy project is now providing volunteers with more complicated data that includes auxiliary monitors of the detector to identify the root cause of glitches.

}, doi = {10.1140/epjp/s13360-023-04795-4}, author = {Michael Zevin and Corey B. Jackson and Zoheyr Doctor and Yunan Wu and Carsten {\O}sterlund and L. Clifton Johnson and Christopher P. L. Berry and Kevin Crowston and Scott B. Coughlin and Vicky Kalogera and Sharan Banagiri and Derek Davis and Jane Glanzer and Renzhi Hao and Aggelos K. Katsaggelos and Oli Patane and Jennifer Sanchez and Joshua Smith and Siddharth Soni and Laura Trouille and Marissa Walker and Irina Aerith and Wilfried Domainko and Victor-Georges Baranowski and Gerhard Niklasch and Barbara T{\'e}gl{\'a}s} } @proceedings {2023, title = {Design principles for background knowledge to enhance learning in citizen science}, year = {2023}, pages = {563{\textendash}580}, address = {Barcelona, Spain and virtual}, abstract = {

Citizen scientists make valuable contributions to science but need to learn about the data they are working with to be able to perform more advanced tasks. We present a set of design principles for identifying the kinds of background knowledge that are important to support learning at different stages of engagement, drawn from a study of how free/libre open source software developers are guided to create and use documents. Specifically, we suggest that newcomers require help understanding the purpose, form and content of the documents they engage with, while more advanced developers add understanding of information provenance and the boundaries, relevant participants and work processes. We apply those principles in two separate but related studies. In study 1, we analyze the background knowledge presented to volunteers in the Gravity Spy citizen-science project, mapping the resources to the framework and identifying kinds of knowledge that were not initially provided. In study 2, we use the principles proactively to develop design suggestions for Gravity Spy 2.0, which will involve volunteers in analyzing more diverse sources of data. This new project extends the application of the principles by seeking to use them to support understanding of the relationships between documents, not just the documents individually. We conclude by discussing future work, including a planned evaluation of Gravity Spy 2.0 that will provide a further test of the design principles.

}, doi = {10.1007/978-3-031-28032-0_43}, attachments = {https://citsci.syr.edu/sites/crowston.syr.edu/files/Design_Background_iConf.pdf}, author = {Kevin Crowston and Corey Brian Jackson and Isabella Corieri and Carsten {\O}sterlund} } @article {2021, title = {Imagine All the People: Citizen Science, Artificial Intelligence, and Computational Research}, year = {2021}, institution = {Computing Community Consortium (CCC)}, address = {Washington, DC}, abstract = {

Machine learning, artificial intelligence, and deep learning have advanced significantly over the past decade. Nonetheless, humans possess unique abilities such as creativity, intuition, context and abstraction, analytic problem solving, and detecting unusual events. To successfully tackle pressing scientific and societal challenges, we need the complementary capabilities of both humans and machines. The Federal Government could accelerate its priorities on multiple fronts through judicious integration of citizen science and crowdsourcing with artificial intelligence (AI), Internet of Things (IoT), and cloud strategies.

}, url = {https://cra.org/ccc/wp-content/uploads/sites/2/2021/03/CCC-TransitionPaperImagine-All-the-People.pdf}, author = {Lea A. Shanley and Lucy Fortson and Tanya Berger-Wolf and Kevin Crowston and Pietro Michelucci} } @article {9999, title = {Building an apparatus: Refractive, reflective and diffractive readings of trace data}, journal = {Journal of the Association for Information Systems}, volume = {21}, year = {2020}, pages = {Article 10}, abstract = {

We propose a set of methodological principles and strategies for the use of trace data, i.e., data capturing performances carried out on or via information systems, often at a fine level of detail. Trace data comes with a number of methodological and theoretical challenges associated with the inseparable nature of the social and material. Drawing on Haraway and Barad{\textquoteright}s distinctions among refraction, reflection and diffraction, we compare three approaches to trace data analysis. We argue that a diffractive methodology allows us to explore how trace data are not given but created through construction of a research apparatus to study trace data. By focusing on the diffractive ways in which traces ripple through an apparatus, it is possible to explore some of the taken-for-granted, invisible dynamics of sociomateriality. Equally important, this approach allows us to describe what and when distinctions within entwined phenomena emerge in the research process. Empirically, we illustrate the guiding principles and strategies by analyzing trace data from Gravity Spy, a crowdsourced citizen science project on Zooniverse. We conclude by suggesting that a diffractive methodology may help us draw together quantitative and qualitative research practices in new and productive ways that also raise interesting design questions.

}, doi = {10.17705/1jais.00590}, attachments = {https://citsci.syr.edu/sites/crowston.syr.edu/files/RA-JAIS-17-0130.R3.1_FIN\%20to\%20share.pdf}, author = {Carsten {\O}sterlund and Kevin Crowston and Corey Brian Jackson} } @inproceedings {harandi2020genie, title = {The Genie in the Bottle: Different Stakeholders, Different Interpretations of Machine Learning}, year = {2020}, type = {Working paper}, address = {Wailea, HI}, abstract = {

Machine learning (ML) constitutes an algorithmic phenomenon with some distinctive characteristics (e.g., being trained, probabilistic). Our understanding of such systems is limited when it comes to how these unique characteristics play out in organizational settings and what challenges different groups of users will face in working with them. We explore how people developing or using an ML system come to understand its capabilities and challenges. We draw on the social construction of technology tradition to frame our analysis of interviews and discussion board posts involving designers and users of an ML-supported citizen-science crowdsourcing project named Gravity Spy. Our findings reveal some of the challenges facing different relevant social groups. We find that the type of understandings achieved by groups having less interaction with the technology is shaped by outside influences rather than the specifics of the system and its role in the project. Notably, some users mistake human input for ML input. This initial understanding of how different participants understand and engage with ML points to challenges that need to be overcome to help participants deal with the opaque position ML often holds in a work system.

}, doi = {10.24251/HICSS.2020.719}, attachments = {https://citsci.syr.edu/sites/crowston.syr.edu/files/Social_Construction_of_ML_in_GS_HICCS2020.pdf}, author = {Mahboobeh Harandi and Kevin Crowston and Corey Jackson and Carsten {\O}sterlund} } @article {crowston2020knowledge, title = {Knowledge Tracing to Model Learning in Online Citizen Science Projects}, journal = {IEEE Transactions on Learning Technologies}, volume = {13}, year = {2020}, pages = {123--134}, abstract = {

We present the design of a citizen science system that uses machine learning to guide the presentation of image classification tasks to newcomers to help them more quickly learn how to do the task while still contributing to the work of the project. A Bayesian model for tracking volunteer learning for training with tasks with uncertain outcomes is presented and fit to data from 12,986 volunteer contributors. The model can be used both to estimate the ability of volunteers and to decide the classification of an image. A simulation of the model applied to volunteer promotion and image retirement suggests that the model requires fewer classifications than the current system.

}, issn = {1939-1382}, doi = {10.1109/TLT.2019.2936480}, attachments = {https://citsci.syr.edu/sites/crowston.syr.edu/files/transaction\%20paper\%20final\%20figures\%20in\%20text.pdf}, author = {Kevin Crowston and Carsten {\O}sterlund and Tae Kyoung Lee and Corey Brian Jackson and Mahboobeh Harandi and Sarah Allen and Sara Bahaadini and Scott Coughlin and Aggelos Katsaggelos and Shane Larson and Neda Rohani and Joshua Smith and Laura Trouille and Michael Zevin} } @article {jackson2020shifting, title = {Shifting forms of Engagement: Volunteer Learning in Online Citizen Science}, journal = {Proceedings of the ACM on Human-Computer Interaction}, year = {2020}, pages = {36}, abstract = {

Open collaboration platforms involve people in many tasks, from editing articles to analyzing datasets. To facilitate mastery of these practices, communities offer a number of learning resources, ranging from project-defined FAQs to individually-oriented search tools and communal discussion boards. However, it is not clear which project resources best support participant learning, overall and at different stages of engagement with the project. We draw on S{\o}rensen{\textquoteright}s framework of forms of presence to distinguish three forms of engagement with learning resources: authoritative, agent-centered and communal. We analyzed trace data from the GravitySpy citizen-science project using a mixed-effects logistic regression with volunteer performance as an outcome variable. The findings suggest that engagement with authoritative resources (e.g., those constructed by project organizers) facilitates performance initially. However, as tasks become more difficult, volunteers seek and benefit from engagement with their own agent-centered resources and community generated resources. These findings suggest a broader scope for the design of learning resources for online communities.

}, doi = {10.1145/3392841}, attachments = {https://citsci.syr.edu/sites/crowston.syr.edu/files/3392841.pdf}, author = {Corey Brian Jackson and Carsten {\O}sterlund and Mahboobeh Harandi and Kevin Crowston and Laura Trouille} } @article {9999, title = {Teaching Citizen Scientists to Categorize Glitches using Machine-Learning-Guided Training}, journal = {Computers in Human Behavior}, volume = {105}, year = {2020}, pages = {106198}, abstract = {

Training users in online communities is important for making high performing contributors. However, several conundrums exist in choosing the most effective approaches to training users. For example, if it takes time to learn to do the task correctly, then the initial contributions may not be of high enough quality to be useful. We conducted an online field experiment where we recruited users (N = 386) in a web-based citizen-science project to evaluate the two training approaches. In one training regime, users received one-time training and were asked to learn and apply twenty classes to the data. In the other approach, users were gradually exposed to classes of data that were selected by trained machine learning algorithms as being members of particular classes. The results of our analysis revealed that the gradual training produced {\textquotedblleft}high performing contributors{\textquotedblright}. In our comparison of the treatment and control groups we found users who experienced gradual training performed significantly better on the task (an average accuracy of 90\% vs. 54\%), contributed more work (an average of 228 vs. 121 classifications), and were retained in the project for a longer period of time (an average of 2.5 vs. 2 sessions). The results suggest online production communities seeking to train newcomers would benefit from training regimes that gradually introduce them to the work of the project using real tasks.

}, doi = {10.1016/j.chb.2019.106198}, attachments = {https://citsci.syr.edu/sites/crowston.syr.edu/files/MLGT-preprint.pdf}, author = {Corey Jackson and Carsten {\O}sterlund and Kevin Crowston and Mahboobeh Harandi and Sarah Allen and Sara Bahaadini and Scott Coughlin and Vicky Kalogera and Aggelos Katsaggelos and Shane Larson and Neda Rohani and Joshua Smith and Laura Trouille and Michael Zevin} } @article {2019, title = {Classifying the unknown: Discovering novel gravitational-wave detector glitches using similarity learning}, journal = {Physical Review D}, volume = {99}, year = {2019}, pages = {082002}, abstract = {

The observation of gravitational waves from compact binary coalescences by LIGO and Virgo has begun a new era in astronomy. A critical challenge in making detections is determining whether loud transient features in the data are caused by gravitational waves or by instrumental or environmental sources. The citizen-science project Gravity Spy has been demonstrated as an efficient infrastructure for classifying known types of noise transients (glitches) through a combination of data analysis performed by both citizen volunteers and machine learning. We present the next iteration of this project, using similarity indices to empower citizen scientists to create large data sets of unknown transients, which can then be used to facilitate supervised machine-learning characterization. This new evolution aims to alleviate a persistent challenge that plagues both citizen-science and instrumental detector work: the ability to build large samples of relatively rare events. Using two families of transient noise that appeared unexpectedly during LIGO{\textquoteright}s second observing run, we demonstrate the impact that the similarity indices could have had on finding these new glitch types in the Gravity Spy program.

}, issn = {2470-0010}, doi = {10.1103/PhysRevD.99.082002}, author = {Scott Coughlin and Sara Bahaadini and Neda Rohani and Michael Zevin and Patane, Oli and Mahboobeh Harandi and Corey Brian Jackson and Noroozi, V. and Sarah Allen and Areeda, J. and Coughlin, M. and Ruiz, P. and Berry, C. P. L. and Kevin Crowston and Aggelos Katsaggelos and Andrew Lundgren and Carsten {\O}sterlund and Joshua Smith and Laura Trouille and Vicky Kalogera} } @article {9999, title = {Coordinating Advanced Crowd Work: Extending Citizen Science}, journal = {Citizen Science: Theory and Practice}, volume = {4}, year = {2019}, pages = {1{\textendash}12}, abstract = {

Crowdsourcing work with high levels of coupling between tasks poses challenges for coordination. This paper presents a study of two online citizen science projects that involved volunteers in such tasks: not just analyzing bulk data but also interpreting data and writing a paper for publication in one project and identifying new classes of data in the other. However, extending the reach of citizen science adds tasks with more dependencies, which calls for more elaborate coordination mechanisms but the relationship between the project and volunteers limits how work can be coordinated. Contrariwise, a mismatch between dependencies and available coordination mechanisms can be expected to lead to performance problems. The results of the study offer recommendations for design of citizen science projects for advanced tasks.

}, doi = {10.5334/cstp.166}, attachments = {https://citsci.syr.edu/sites/crowston.syr.edu/files/Quench\%20to\%20share.pdf}, author = {Kevin Crowston and Mitchell, Erica Michelle and Carsten {\O}sterlund} } @inproceedings {jackson2019linguistic, title = {Linguistic adoption in online citizen science: A structurational perspective}, year = {2019}, address = {Munich, Germany}, abstract = {

For peer-production projects to be successful, members must develop a specific and common language that enables them to cooperate. We address the question of what factors affect the development of shared language in open peer production communities? Answering this question is important because we want the communities to be productive even when self-managed, which requires understanding how shared language emerges. We examine this question using a structurational lens in the setting of a citizen science project. Examining the use of words in the Gravity Spy citizen science project, we find that many words are reused and that most novel words that are introduced are not picked up, showing reproduction of structure. However, some novel words are used by others, showing an evolution of the structure. Participants with roles closer to the science are more likely to have their words reused, showing the mutually reinforcing nature of structures of signification, legitimation and domination.

}, url = {https://aisel.aisnet.org/icis2019/crowds_social/crowds_social/28/}, attachments = {https://citsci.syr.edu/sites/crowston.syr.edu/files/Linguistic\%20Adoption\%20\%28ICIS\%29\%20final.pdf}, author = {Corey Brian Jackson and Carsten {\O}sterlund and Mahboobeh Harandi and Dhruv Kharwar and Kevin Crowston} } @article {lee2018appealing, title = {Appealing to different motivations in a message to recruit citizen scientists: results of a field experiment}, journal = {Journal of Science Communication}, volume = {17}, year = {2018}, chapter = {A02}, abstract = {

This study examines the relative efficacy of citizen science recruitment messages appealing to four motivations that were derived from previous research on motives for participation in citizen-science projects. We report on an experiment (N=36,513) that compared the response to email messages designed to appeal to these four motives for participation. We found that the messages appealing to the possibility of contributing to science and learning about science attracted more attention than did one about helping scientists but that one about helping scientists generated more initial contributions. Overall, the message about contributing to science resulted in the largest volume of contributions and joining a community, the lowest. The results should be informative to those managing citizen-science projects.

}, keywords = {Citizen Science}, doi = {10.22323/2.17010202}, url = {https://jcom.sissa.it/archive/17/01/JCOM_1701_2018_A02}, attachments = {https://citsci.syr.edu/sites/crowston.syr.edu/files/JCOM_1701_2018_A02.pdf}, author = {Lee, Tae Kyoung and Kevin Crowston and Mahboobeh Harandi and Carsten {\O}sterlund and Grant Miller} } @proceedings {9999, title = {Coordinating advanced crowd work: Extending citizen science}, year = {2018}, edition = {51st}, abstract = {Crowdsourcing work with high levels of coupling between tasks poses challenges for coordination. This paper presents a study of an online citizen science project that involved volunteers in such tasks: not just analyzing bulk data but also interpreting data and writing a paper for publication. However, extending the reach of citizen science adds tasks with more dependencies, which calls for more elaborate coordination mechanisms but the relationship between the project and volunteers limits how work can be coordinated. Contrariwise, a mismatch between dependencies and available coordination mechanisms can be expected to lead to performance problems. The results of the study offer recommendations for design of crowdsourcing of more complex tasks. }, doi = {10.24251/HICSS.2018.212}, url = {http://hdl.handle.net/10125/50099}, attachments = {https://citsci.syr.edu/sites/crowston.syr.edu/files/Quench\%20to\%20distribute.pdf}, author = {Kevin Crowston and Mitchell, Erica Michelle and Carsten {\O}sterlund} } @article {2018, title = {Did they login? Patterns of anonymous contributions to online communities}, journal = {Proceedings of the ACM on Human-Computer Interaction}, volume = {2}, year = {2018}, pages = {Article 77}, abstract = {

Researchers studying user behaviors in online communities often conduct analyses of events collected in system logs, e.g., a system{\textquoteright}s record of a comment post or of a contribution. However, analysis of user behaviors is more difficult if users make contributions without being logged-in (i.e., anonymously). Since a user{\textquoteright}s account will not be associated with contributions that user makes anonymously, conclusions about user behaviors that look only at attributed actions might not account for a user{\textquoteright}s full experience. To understand the impacts of anonymous contributions on research, we conducted an analysis of system logs containing anonymous activities in two online citizen science projects. By linking anonymous events with user IDs we found that (1) many users contribute anonymously, though with varied patterns of contribution; and (2) including anonymous activities alter conclusions made about users{\textquoteright} experience with the project. These results suggest that researchers of human behaviors in online communities should consider the possible impacts of anonymous interaction on their ability to draw conclusions about user behaviors in these settings.

}, doi = {10.1145/3274346}, attachments = {https://citsci.syr.edu/sites/crowston.syr.edu/files/anonymous-contributions-cameraready.pdf}, author = {Corey Brian Jackson and Kevin Crowston and Carsten {\O}sterlund} } @article {9999, title = {Folksonomies to support coordination and coordination of folksonomies}, journal = {Computer Supported Cooperative Work}, volume = {27}, year = {2018}, pages = {647{\textendash}678}, abstract = {

Members of highly-distributed groups in online production communities face challenges in achieving coordinated action. Existing CSCW research highlights the importance of shared language and artifacts when coordinating actions in such settings. To better understand how such shared language and artifacts are, not only a guide for, but also a result of collaborative work we examine the development of folksonomies (i.e., volunteer-generated classification schemes) to support coordinated action. Drawing on structuration theory, we conceptualize a folksonomy as an interpretive schema forming a structure of signification. Our study is set in the context of an online citizen-science project, Gravity Spy, in which volunteers label "glitches" (noise events recorded by a scientific instrument) to identify and name novel classes of glitches. Through a multi-method study combining virtual and trace ethnography, we analyze folksonomies and the work of labelling as mutually constitutive, giving folksonomies a dual role: an emergent folksonomy supports the volunteers in labelling images at the same time that the individual work of labelling images supports the development of a folksonomy. However, our analysis suggests that the lack of supporting norms and authoritative resources (structures of legitimation and domination) undermines the power of the folksonomy and so the ability of volunteers to coordinate their decisions about naming novel glitch classes. These results have implications for design. If we hope to support the development of emergent folksonomies, online production communities need to facilitate 1) tag gardening, a process of consolidating overlapping terms of artifacts; 2) demarcate a clear home for discourses around folksonomy disagreements; 3) highlight clearly when decisions have been reached; and 4) inform others about those decisions.

}, doi = {10.1007/s10606-018-9327-z}, url = {https://rdcu.be/NZ7E}, attachments = {https://citsci.syr.edu/sites/crowston.syr.edu/files/ECSCW-Paper-Final.pdf}, author = {Corey Brian Jackson and Kevin Crowston and Carsten {\O}sterlund and Mahboobeh Harandi} } @article {9999, title = {Stages of motivation for contributing user-generated content: A theory and empirical test}, journal = {International Journal of Human-Computer Studies}, volume = {109}, year = {2018}, pages = {89-101}, publisher = {Syracuse University}, address = {Syracuse, NY}, abstract = {

User-generated content (UGC) projects involve large numbers of mostly unpaid contributors collaborating to create content. Motivation for such contributions has been an active area of research. In prior research, motivation for contribution to UGC has been considered a single, static and individual phenomenon. In this paper, we argue that it is instead three separate but interrelated phenomena. Using the theory of helping behaviour as a framework and integrating social movement theory, we propose a stage theory that distinguishes three separate sets (initial, sustained and meta) of motivations for participation in UGC. We test this theory using a data set from a Wikimedia Editor Survey (Wikimedia Foundation, 2011). The results suggest several opportunities for further refinement of the theory but provide support for the main hypothesis, that different stages of contribution have distinct motives. The theory has implications for both researchers and practitioners who manage UGC projects.

}, doi = {10.1016/j.ijhcs.2017.08.005}, attachments = {https://citsci.syr.edu/sites/crowston.syr.edu/files/crowston\%20fagnot\%20to\%20distribute.pdf}, author = {Kevin Crowston and Fagnot, Isabelle} } @inproceedings {crowston2017blending, title = {Blending machine and human learning processes}, year = {2017}, abstract = {

Citizen science projects rely on contributions from volunteers to achieve their scientific goals and so face a dilemma: providing volunteers with explicit training might increase the quality of contributions, but at the cost of losing the work done by newcomers during the training period, which for many is the only work they will contribute to the project. Based on research in cognitive science on how humans learn to classify images, we have designed an approach to use machine learning to guide the presentation of tasks to newcomers that help them more quickly learn how to do the image classification task while still contributing to the work of the project. A Bayesian model for tracking this learning is presented.

}, doi = {10.24251/HICSS.2017.009}, url = {http://hdl.handle.net/10125/41159}, attachments = {https://citsci.syr.edu/sites/crowston.syr.edu/files/training\%20v3\%20to\%20share_0.pdf}, author = {Kevin Crowston and Carsten {\O}sterlund and Lee, Tae Kyoung} } @article {9999, title = {Gamers, citizen scientists, and data: Exploring participant contributions in two games with a purpose}, journal = {Computers in Human Behavior}, volume = {68}, year = {2017}, pages = {254{\textendash}268}, abstract = {

Two key problems for crowd-sourcing systems are motivating contributions from participants and ensuring the quality of these contributions. Games have been suggested as a motivational approach to encourage contribution, but attracting participation through game play rather than intrinsic interest raises concerns about the quality of the contributions provided. These concerns are particularly important in the context of citizen science projects, when the contributions are data to be used for scientific research. To assess the validity of concerns about the effects of gaming on data quality, we compare the quality of data obtained from two citizen science games, one a {\textquotedblleft}gamified{\textquotedblright} version of a species classification task and one a fantasy game that used the classification task only as a way to advance in the game play. Surprisingly, though we did observe cheating in the fantasy game, data quality (i.e., classification accuracy) from participants in the two games was not significantly different. As well, data from short-time contributors was also at a usable level of accuracy. Finally, learning did not seem to affect data quality in our context. These findings suggest that various approaches to gamification can be useful for motivating contributions to citizen science projects.

}, doi = {10.1016/j.chb.2016.11.035}, attachments = {https://citsci.syr.edu/sites/crowston.syr.edu/files/chb2016.pdf}, author = {Nathan Prestopnik and Kevin Crowston and Wang, Jun} } @article {2017, title = {Gravity Spy: Humans, machines and the future of citizen science}, year = {2017}, address = {Portland, OR}, abstract = {Gravity Spy is a citizen science project that draws on the contributions of both humans and machines to achieve its scientific goals. The system supports the Laser Interferometer Gravitational Observatory (LIGO) by classifying {\textquotedblleft}glitches{\textquotedblright} that interfere with observations. The system makes three advances on the current state of the art: explicit training for new volunteers, synergy between machine and human classification and support for discovery of new classes of glitch. As well, it provides a platform for human-centred computing research on motivation, learning and collaboration. The system has been launched and is currently in operation.}, doi = {10.1145/3022198.3026329}, attachments = {https://citsci.syr.edu/sites/crowston.syr.edu/files/cpa137-crowstonA.pdf}, author = {Kevin Crowston and The Gravity Spy Team} } @article {657, title = {Gravity Spy: Integrating Advanced LIGO Detector Characterization, Machine Learning, and Citizen Science}, journal = {Classical and Quantum Gravity}, volume = {34}, year = {2017}, pages = {064003}, type = {Journal Article}, doi = {10.1088/1361-6382/aa5cea}, author = {Michael Zevin and Scott Coughlin and Sara Bahaadini and Emre Besler and Neda Rohani and Sarah Allen and Miriam Cabero and Kevin Crowston and Aggelos Katsaggelos and Shane Larson and Tae Kyoung Lee and Chris Lintott and Tyson Littenberg and Andrew Lundgren and Carsten Oesterlund and Joshua Smith and Laura Trouille and Vicky Kalogera} } @inbook {9999, title = {Levels of trace data for social and behavioural science research}, booktitle = {Big Data Factories: Collaborative~Approaches}, year = {2017}, publisher = 
{Springer Nature}, organization = {Springer Nature}, abstract = {

The explosion of data available from online systems such as social media is creating a wealth of trace data, that is, data that record evidence of human activity. The volume of data available offers great potential to advance social and behavioural science research. However, the data are of a very different kind than more conventional social and behavioural science data, posing challenges to use. This paper adopts a data framework from Earth Observation science and applies it to trace data to identify possible issues in analyzing trace data. Application of the framework also reveals issues for sharing and reusing data.

}, isbn = {978-3-319-59186-5}, doi = {10.1007/978-3-319-59186-5_4}, attachments = {https://citsci.syr.edu/sites/crowston.syr.edu/files/160529\%20levels\%20book\%20chapter_0.pdf}, author = {Kevin Crowston}, editor = {Sorin Matei and Sean Goggins and Nicolas Jullien} } @article {2017, title = {Recruiting messages matter: Message strategies to attract citizen scientists}, year = {2017}, type = {Poster}, address = {Portland, OR}, abstract = {Although participation of citizen scientists is critical for a success of citizen science projects (a distinctive form of crowdsourcing), little attention has been paid to what types of messages can effectively recruit citizen scientists. Derived from previous studies on citizen scientists{\textquoteright} motivations, we created and sent participants one of four recruiting messages for a new project, Gravity Spy, appealing to different motivations (i.e., learning about science, social proof, contribution to science, and altruism). Counter to earlier studies on motivation, our results showed that messages appealing to learning, contribution and social proof were more effective than a message appealing to altruism. We discuss the inconsistency between the present and prior study results and plans for future work.}, doi = {10.1145/3022198.3026335}, attachments = {https://citsci.syr.edu/sites/crowston.syr.edu/files/cpa143-leeA.pdf}, author = {Lee, Tae Kyoung and Kevin Crowston and Carsten {\O}sterlund and Grant Miller} } @article {642, title = {Encouraging Work in Citizen Science: Experiments in Goal Setting and Anchoring}, year = {2016}, abstract = {This paper describes the results of an online field experiment where we designed and analyzed the effects of a goal-setting tracker in an online citizen science project - Floating Forest. The design of our tracker was influenced by psychology theories of anchoring and goal-setting. 
The results of our experiment revealed: (1) setting goals increases annotations in a session; (2) numeric anchors influence goals; and (3) participants in the treatment who saw a prompt but did not set a goal contributed more annotations than the participants in the control group. Our research shows how goal-setting and anchoring combine to increase work in online communities.}, doi = {10.1145/2818052.2869129}, attachments = {https://citsci.syr.edu/sites/crowston.syr.edu/files/CSCW-abstract.pdf}, author = {Corey Brian Jackson and Kevin Crowston and Gabriel Mugar and Carsten {\O}sterlund} } @proceedings {9999, title = {{\textquotedblleft}Guess what! You{\textquoteright}re the first to see this event{\textquotedblright}: Increasing Contribution to Online Production Communities}, year = {2016}, abstract = {

In this paper, we describe the results of an online field experiment examining the impacts of messaging about task novelty on the volume of volunteers{\textquoteright} contributions to an online citizen science project. Encouraging volunteers to provide a little more content as they work is an attractive strategy to increase the community{\textquoteright}s output. Prior research found that an important motivation for participation in online citizen science is the wonder of being the first person to observe a particular image. To appeal to this motivation, a pop-up message was added to an online citizen science project that alerted volunteers when they were the first to annotate a particular image. Our analysis reveals that new volunteers who saw these messages increased the volume of annotations they contributed. The results of our study suggest an additional strategy to increase the amount of work volunteers contribute to online communities and citizen science projects specifically.

}, doi = {10.1145/2957276.2957284}, author = {Corey Brian Jackson and Kevin Crowston and Gabriel Mugar and Carsten {\O}sterlund} } @article {9999, title = {The Hermeneutics of Trace Data: Building an Apparatus}, year = {2016}, abstract = {When people interact via information systems, the data is captured by the systems as a side effect of the interaction. These data are increasingly interesting and available for research. In a sense, these systems become a new kind of research apparatus, and like all advances in instrumentation, open up new areas of study with the potential for discovery. While at first glance, such {\textquotedblleft}big data{\textquotedblright} analysis seems to be most suitable for a positivist quantitative research approach. However, a closer inspection reveals that interpretive research strategies may better support the challenges associated with digital trace data. By merging insights from hermeneutics and sociomateriality, we argue that trace data analysis entails the building of a research apparatus. Hermeneutic principles play a key role in the application of this apparatus and allow researchers to make sense of the often partial traces left by online participants. Drawing on longitudinal trace data from a study of citizen science practices the paper illustrates the value of merging insights from hermeneutics with sociomaterial insights. The approach allows researchers to account for not only the material dynamics of digital trace data but also the temporal dimension of online practices. }, attachments = {https://citsci.syr.edu/sites/crowston.syr.edu/files/Crowston_Osterlund_Jackson_Mugar_The_Hermeneutics_of_Trace_Data_IFIP8.2_2016\%20to\%20distribute.pdf}, author = {Carsten {\O}sterlund and Kevin Crowston and Corey Jackson} } @proceedings {2016, title = {Which Way Did They Go? Newcomer Movement through the Zooniverse}, year = {2016}, month = {2016}, abstract = {

Research on newcomer roles in peer production sites (e.g., Wikipedia) is characterized by a broad and relatively well-articulated set of functionally and culturally recognizable roles. But not all communities come with well-defined roles that newcomers can aspire to occupy. The present study explores activity clusters newcomers create when faced with few recognizable roles to fill and limited access to other participants{\textquoteright} work that serves as an exemplar. Drawing on a mixed method research design, we present findings from an analysis of 1,687 newcomers{\textquoteright} sessions in a citizen science project. Combining session- and individual-level analysis produced three findings: (1) newcomers{\textquoteright} activities manifest a diverse range of session types; (2) newcomers toggle between light work sessions and more involved types of production or community engagement; (3) an interesting relationship between high-level contributors who do a lot of work but little talk and a small group that does a lot of talk but less work. The former group draws heavily on posts contributed by the latter group. Identifying shifts and regularities in contribution facilitates improved mechanisms for engaging participants and the design of online citizen science communities.

}, doi = {10.1145/2818048.2835197}, attachments = {https://citsci.syr.edu/sites/crowston.syr.edu/files/CSCW2016-Roles.pdf}, author = {Corey Brian Jackson and Carsten {\O}sterlund and Maidel, Veronica and Kevin Crowston and Gabriel Mugar} } @proceedings {2015, title = {Being Present in Online Communities: Learning in Citizen Science}, year = {2015}, address = {Limerick, Ireland}, abstract = {

How online community members learn to become valuable contributors constitutes a long-standing concern of Community \& Technology researchers. The literature tends to highlight participants{\textquoteright} access to practice, feedback from experienced members, and relationship building. However, not all crowdsourcing environments offer participants opportunities for access, feedback, and relationship building (e.g., Citizen Science). We study how volunteers learn to participate in a citizen science project, Planet Hunters, through participant observation, interviews, and trace ethnography. Drawing on S{\o}rensen{\textquoteright}s sociomaterial theories of presence, we extend the notion of situated learning to include several modes of learning. The empirical findings suggest that volunteers in citizen science engage more than one form of access to practice, feedback, and relationship building. Communal relations characterize only one form of learning. Equally important to their learning are authority{\textendash}subject and agent-centered forms of access, feedback, and relationship building.

}, doi = {10.1145/2768545.2768555}, attachments = {https://citsci.syr.edu/sites/crowston.syr.edu/files/C\%26T_2015_FINAL.pdf}, author = {Gabriel Mugar and Carsten {\O}sterlund and Corey Jackson and Kevin Crowston} } @proceedings {2015, title = {Motivations for sustained participation in crowdsourcing: The role of talk in a citizen science case study}, year = {2015}, month = {1/2015}, address = {Koloa, HI}, abstract = {

The paper explores the motivations of volunteers in a large crowdsourcing project and contributes to our understanding of the motivational factors that lead to deeper engagement beyond initial participation. Drawing on the theory of legitimate peripheral participation (LPP) and the literature on motivation in crowdsourcing, we analyze interview and trace data from a large citizen science project. The analyses identify ways in which the technical features of the projects may serve as motivational factors leading participants towards sustained participation. The results suggest volunteers first engage in activities to support knowledge acquisition and later share knowledge with other volunteers and finally increase participation in Talk through a punctuated process of role discovery.

}, attachments = {https://citsci.syr.edu/sites/crowston.syr.edu/files/Motivation\%20in\%20Talk\%20Submitted_FINAL\%28Formatted\%29.pdf}, author = {Corey Brian Jackson and Carsten {\O}sterlund and Gabriel Mugar and Kevin Crowston and Katie DeVries Hassman} } @article {2015, title = {Surveying the citizen science landscape}, journal = {First Monday}, volume = {20}, number = {1}, year = {2015}, month = {1/2015}, type = {Journal Article}, abstract = {

Citizen science has seen enormous growth in recent years, in part due to the influence of the Internet, and a corresponding growth in interest. However, the few stand-out examples that have received attention from media and researchers are not representative of the diversity of the field as a whole, and therefore may not be the best models for those seeking to study or start a citizen science project. In this work, we present the results of a survey of citizen science project leaders, identifying sub-groups of project types according to a variety of features related to project design and management, including funding sources, goals, participant activities, data quality processes, and social interaction. These combined features highlight the diversity of citizen science, providing an overview of the breadth of the phenomenon and laying a foundation for comparison between citizen science projects and to other online communities.

}, doi = {10.5210/fm.v20i1.5520}, url = {https://journals.uic.edu/ojs/index.php/fm/article/view/5520}, attachments = {https://citsci.syr.edu/sites/crowston.syr.edu/files/Surveying\%20the\%20citizen\%20science\%20landscape.pdf}, author = {Wiggins, Andrea and Kevin Crowston} } @proceedings {9999, title = {Exploring data quality in games with a purpose}, year = {2014}, month = {3/2014}, address = {Berlin, Germany}, abstract = {

A key problem for crowd-sourcing systems is motivating contributions from participants and ensuring the quality of these contributions. Games have been suggested as a motivational approach to encourage contribution, but attracting participation through game play rather than scientific interest raises concerns about the quality of the data provided, which is particularly important when the data are to be used for scientific research. To assess whether these concerns are justified, we compare the quality of data obtained from two citizen science games, one a {\textquotedblleft}gamified{\textquotedblright} version of a species classification task and one a fantasy game that used the classification task only as a way to advance in the game play. Surprisingly, though we did observe cheating in the fantasy game, data quality (i.e., classification accuracy) from participants in the two games was not significantly different. As well, the quality of data from short-time contributors was at a usable level of accuracy. These findings suggest that various approaches to gamification can be useful for motivating contributions to citizen science projects.

}, doi = {10.9776/14066}, attachments = {https://citsci.syr.edu/sites/crowston.syr.edu/files/gamedataquality_cameraready_4.pdf}, author = {Nathan Prestopnik and Kevin Crowston and Wang, Jun} } @proceedings {9999, title = {Planet Hunters and Seafloor Explorers: Legitimate Peripheral Participation Through Practice Proxies in Online Citizen Science}, year = {2014}, month = {2/2014}, abstract = {

Making the traces of user participation in primary activities visible in online crowdsourced initiatives has been shown to help new users understand the norms of participation but participants do not always have access to others{\textquoteright} work. Through a combination of virtual and trace ethnography we explore how new users in two online citizen science projects engage other traces of activity as a way of compensating. Merging the theory of legitimate peripheral participation with Erickson and Kellogg{\textquoteright}s theory of social translucence we introduce the concept of practice proxies; traces of user activities in online environment that act as resources to orient newcomers towards the norms of practice. Our findings suggest that newcomers seek out practice proxies in the social features of the projects that highlight contextualized and specific characteristics of primary work practice.

}, doi = {10.1145/2531602.2531721}, attachments = {https://citsci.syr.edu/sites/crowston.syr.edu/files/paper_revised\%20copy\%20to\%20post.pdf}, author = {Gabriel Mugar and Carsten {\O}sterlund and Katie DeVries Hassman and Kevin Crowston and Corey Brian Jackson} } @conference {Crowston:2013, title = {Motivation and data quality in a citizen science game: A design science evaluation}, booktitle = {Forty-sixth Hawai{\textquoteright}i International Conference on System Sciences (HICSS-46)}, year = {2013}, month = {1/2013}, address = {Wailea, HI}, attachments = {https://citsci.syr.edu/sites/crowston.syr.edu/files/hicss2013citizensort_cameraready.pdf}, author = {Kevin Crowston and Nathan Prestopnik} } @proceedings {crowston2012, title = {Amazon Mechanical Turk: A research tool for organizations and information systems scholars}, volume = {389}, year = {2012}, month = {12/2012}, pages = {210-221}, publisher = {Springer}, address = {Tampa, FL}, isbn = {978-3-642-35141-9}, issn = {1868-4238}, doi = {10.1007/978-3-642-35141-9}, attachments = {https://citsci.syr.edu/sites/crowston.syr.edu/files/3890210.pdf}, author = {Kevin Crowston}, editor = {Anol Bhattacherjee and Brian Fitzgerald} } @proceedings {9999, title = {Citizen science system assemblages: Understanding the technologies that support crowdsourced science}, year = {2012}, month = {2/2012}, address = {Toronto, Ontario}, abstract = {We explore the nature of technologies to support citizen science, a method of inquiry that leverages the power of crowds to collect and analyze scientific data. We evaluate these technologies as system assemblages, collections of interrelated functionalities that support specific activities in pursuit of overall project goals. The notion of system assemblages helps us to explain how different citizen science platforms may be comprised of widely varying functionalities, yet still support relatively similar goals. Related concepts of build vs. buy and web satisfiers vs. 
web motivators are used to explore how different citizen science functionalities may lead to successful project outcomes. Four detailed case studies of current citizen science projects encompassing a cross-section of varying project sizes, resource levels, technologies, and approaches to inquiry help us to answer the following research questions: 1) What do typical system assemblages for citizen science look like? 2) What factors influence the composition of a system assemblage for citizen science? 3) What effect does the assemblage composition have on scientific goals, participant support, motivation, and satisfaction? and 4) What are the design implications for the system assemblage perspective on citizen science technologies?}, attachments = {https://citsci.syr.edu/sites/crowston.syr.edu/files/citizensciencesystemassemblage.pdf}, author = {Nathan Prestopnik and Kevin Crowston} } @article {2012, title = {The future of citizen science: emerging technologies and shifting paradigms}, journal = {Frontiers in Ecology and the Environment}, volume = {10}, year = {2012}, month = {08/2012}, pages = {298{\textendash}304}, abstract = {

Citizen science creates a nexus between science and education that, when coupled with emerging technologies, expands the frontiers of ecological research and public engagement. Using representative technologies and other examples, we examine the future of citizen science in terms of its research processes, program and participant cultures, and scientific communities. Future citizen-science projects will likely be influenced by sociocultural issues related to new technologies and will continue to face practical programmatic challenges. We foresee networked, open science and the use of online computer/video gaming as important tools to engage non-traditional audiences, and offer recommendations to help prepare project managers for impending challenges. A more formalized citizen-science enterprise, complete with networked organizations, associations, journals, and cyberinfrastructure, will advance scientific research, including ecology, and further public education.

}, issn = {1540-9295}, doi = {10.1890/110294}, url = {http://www.esajournals.org/doi/pdf/10.1890/110294}, author = {Newman, Greg and Wiggins, Andrea and Crall, Alycia and Graham, Eric and Newman, Sarah and Kevin Crowston} } @proceedings {2012, title = {Goals and tasks: Two typologies of citizen science projects}, year = {2012}, month = {1/2012}, address = {Wailea, HI}, abstract = {

Citizen science is a form of research collaboration involving members of the public in scientific research projects to address real-world problems. Often organized as a virtual collaboration, these projects are a type of open movement, with collective goals addressed through open participation in research tasks. We conducted a survey of citizen science projects to elicit multiple aspects of project design and operation. We then clustered projects based on the tasks performed by participants and on the project{\textquoteright}s stated goals. The clustering results group projects that show similarities along other dimensions, suggesting useful divisions of the projects.

}, attachments = {https://citsci.syr.edu/sites/crowston.syr.edu/files/hicss-45-final.pdf}, author = {Wiggins, Andrea and Kevin Crowston} } @unpublished {2012, title = {Poster: Socially intelligent computing to support citizen science}, year = {2012}, month = {6/2012}, publisher = {Syracuse University School of Information Studies}, type = {Unpublished poster, presented at the SOCS PIs meeting}, address = {Syracuse, NY}, attachments = {https://citsci.syr.edu/sites/crowston.syr.edu/files/SOCS\%20CS\%20SOCS\%20PI\%20poster\%20small.pdf}, author = {Kevin Crowston} } @conference {Prestopnik:2012a, title = {Purposeful gaming \& socio-computational systems: A citizen science design case}, booktitle = {Group {\textquoteright}12 Conference}, year = {2012}, month = {10/2012}, address = {Sanibel Island, FL, USA}, attachments = {https://citsci.syr.edu/sites/crowston.syr.edu/files/citizensort_cameraready.pdf}, author = {Nathan Prestopnik and Kevin Crowston} } @article {192, title = {Citizen Science System Assemblages: Toward Greater Understanding of Technologies to Support Crowdsourced Science}, year = {2011}, month = {6/2011}, attachments = {https://citsci.syr.edu/sites/crowston.syr.edu/files/system_assemblage_0.pdf}, author = {Nathan Prestopnik and Kevin Crowston} } @article {374, title = {Citizen science system assemblages: Toward greater understanding of technologies to support crowdsourced science}, year = {2011}, month = {06/2011}, publisher = {Syracuse University School of Information Studies}, type = {Unpublished working paper}, abstract = {We explore the nature of technologies to support citizen science, a method of inquiry that leverages the power of crowds to collect and analyze scientific data. We evaluate these technologies as system assemblages, collections of interrelated functionalities that support specific activities in pursuit of overall project goals. 
The notion of system assemblages helps us to explain how different citizen science platforms may be comprised of widely varying functionalities, yet still support relatively similar goals. Related concepts of build vs. buy, support for science vs. support for participants, and web satisfiers vs. web motivators are used to explore how different citizen science functionalities may lead to successful project outcomes. Four detailed case studies of current citizen science projects encompassing a cross-section of varying project sizes, resource levels, technologies, and approaches to inquiry help us to answer the following research questions: 1) What factors influence the composition of a system assemblage for citizen science? 2) What do typical system assemblages for citizen science look like? 3) What effect does the assemblage composition have on scientific goals, participant support, motivation, and satisfaction? and 4) What are the design implications for the system assemblage perspective on citizen science technologies?}, attachments = {https://citsci.syr.edu/sites/crowston.syr.edu/files/system_assemblage.pdf}, author = {Nathan Prestopnik and Kevin Crowston} } @article {9999, title = {Describing public participation in scientific research}, year = {2011}, publisher = {Syracuse University School of Information Studies}, abstract = {We report the results of a descriptive survey of citizen science projects, a form of scientific collaboration engaging members of the public with professional researchers. This phenomenon has seen explosive growth in recent years and is garnering interest from a broadening variety of research domains. However, the lack of adequate description of this diverse population hinders useful research. To address this gap, we conducted a survey of citizen science projects. 
We present a description of the phenomenon to establish a basis for sampling and evaluation of research on citizen science, including details on project resources, participation, technologies, goals, and outcomes. We then reflect on several points of potential development, including technologies to support participation, potential for expanding engagement, and data policies. The diverse organizational and functional arrangements in citizen science projects suggest a variety of areas for future research.}, attachments = {https://citsci.syr.edu/sites/crowston.syr.edu/files/iConference2012.pdf}, author = {Wiggins, Andrea and Kevin Crowston} } @unpublished {9999, title = {Exploring Collective Intelligence Games With Design Science: A Citizen Science Design Case}, year = {2011}, abstract = {Citizen science is a form of collective intelligence where members of the public are recruited to contribute to scientific investigations. Citizen science projects often use web-based systems to support collaborative scientific activities, but finding ways to attract participants and confirm the veracity of the data produced by non-scientists are key research questions. We describe a series of web-based tools and games currently under development to support taxonomic classification of organisms in photographs collected by citizen science projects. In the design science tradition, the systems are purpose-built to test hypotheses about participant motivation and techniques for ensuring data quality. 
Findings from preliminary evaluation and the design process itself are discussed.}, attachments = {https://citsci.syr.edu/sites/crowston.syr.edu/files/designing\%20citizen\%20science\%20games.pdf}, author = {Nathan Prestopnik and Kevin Crowston} } @conference {2011, title = {From Conservation to Crowdsourcing: A Typology of Citizen Science}, booktitle = {Proceedings of the Forty-fourth Hawai{\textquoteright}i International Conference on System Sciences (HICSS-44)}, year = {2011}, month = {1/2011}, address = {Koloa, HI}, abstract = {

Citizen science is a form of research collaboration involving members of the public in scientific research projects to address real-world problems. Often organized as a virtual collaboration, these projects are a type of open movement, with collective goals addressed through open participation in research tasks. Existing typologies of citizen science projects focus primarily on the structure of participation, paying little attention to the organizational and macrostructural properties that are important to designing and managing effective projects and technologies. By examining a variety of project characteristics, we identified five types{\textemdash}Action, Conservation, Investigation, Virtual, and Education{\textemdash}that differ in primary project goals and the importance of physical environment to participation.

}, attachments = {https://citsci.syr.edu/sites/crowston.syr.edu/files/hicss-44.pdf}, author = {Wiggins, Andrea and Kevin Crowston} } @article {2010, title = {Developing a Conceptual Model of Virtual Organizations for Citizen Science}, journal = {International Journal of Organizational Design and Engineering}, volume = {1}, year = {2010}, month = {9/2010}, pages = {148-162}, type = {Research Note}, abstract = {

This paper develops an organization design-oriented conceptual model of scientific knowledge production through citizen science virtual organizations. Citizen science is a form of organization design for collaborative scientific research involving scientists and volunteers, for which Internet-based modes of participation enable massive virtual collaboration by thousands of members of the public. The conceptual model provides an example of a theory development process and discusses its application to an exploratory study. The paper contributes a multi-level process model for organizing investigation into the impact of design on this form of scientific knowledge production.

}, doi = {10.1504/IJODE.2010.035191}, url = {http://www.inderscience.com/filter.php?aid=35191}, attachments = {https://citsci.syr.edu/sites/crowston.syr.edu/files/WigginsCrowstonIJODE2010.pdf}, author = {Wiggins, Andrea and Kevin Crowston} } @conference {2010, title = {Distributed Scientific Collaboration: Research Opportunities in Citizen Science}, booktitle = {The Changing Dynamics of Scientific Collaboration, CSCW 2010 workshop}, year = {2010}, month = {02/2010}, address = {Savannah, GA}, abstract = {This paper introduces a conceptual framework for research on citizen science, a form of collaboration involving scientists and volunteers in scientific research. Designing CSCW systems to support this type of scientific collaboration requires understanding the effects of organizational and work design on the scientific outcomes of citizen science projects. Initial directions for future research are identified, with the goal of developing a foundation for research on and development of cyberinfrastructure and collaborative technologies for supporting citizen science. 
}, url = {http://www.sci.utah.edu/cscw2010papers.html}, attachments = {https://citsci.syr.edu/sites/crowston.syr.edu/files/WigginsCSCWworkshop_0.pdf}, author = {Wiggins, Andrea and Kevin Crowston} } @booklet {2010, title = {SoCS: Socially intelligent computing to support citizen science}, howpublished = {Proposal submitted to the NSF SOCS program}, year = {2010}, attachments = {https://citsci.syr.edu/sites/crowston.syr.edu/files/NSFmaster.pdf}, author = {Kevin Crowston} } @conference {2009, title = {Designing Virtual Organizations for Citizen Science}, booktitle = {IFIP Working Group 8.2 OASIS workshop 2009}, year = {2009}, month = {12/2009}, address = {Phoenix, AZ}, url = {http://sprouts.aisnet.org/9-56/}, attachments = {https://citsci.syr.edu/sites/crowston.syr.edu/files/WigginsOASIS2009.pdf , https://citsci.syr.edu/sites/crowston.syr.edu/files/awigginsOASIS2009.ppt}, author = {Wiggins, Andrea and Kevin Crowston}, editor = {Uri Gal} }