@incollection{fa67a06ee3a64074887e3065cce0cb06,
  title         = {Crowdsourcing Technology to Support Academic Research},
  author        = {Matthias Hirth and Jason Jacques and Peter Rodgers and Ognjen Scekic and Michael Wybrow},
  editor        = {Daniel Archambault and Helen Purchase and Tobias Ho{\ss}feld},
  booktitle     = {Evaluation in the Crowd: Crowdsourcing and Human-Centered Experiments},
  series        = {Lecture Notes in Computer Science},
  volume        = {10264},
  pages         = {70--95},
  publisher     = {Springer},
  address       = {Netherlands},
  year          = {2017},
  month         = sep,
  day           = {29},
  doi           = {10.1007/978-3-319-66435-4_4},
  isbn          = {9783319664347},
  language      = {English},
  abstract      = {Current crowdsourcing platforms typically concentrate on simple microtasks and do not meet the needs of academic research well, where more complex, time consuming studies are required. This has lead to the development of specialised software tools to support academic research on such platforms. However, the loose coupling of the software with the crowdsourcing site means that there is only limited access to the features of the platform. In addition, the specialised nature of the software tools means that technical knowledge is needed to operate them. Hence there is great potential to enrich the features of crowdsourcing platforms from an academic perspective. In this chapter we discuss the possibilities for practical improvement of academic crowdsourced studies through adaption of technological solutions.},
  note          = {Funding: The genesis and planning of this chapter took place at the Dagstuhl Seminar \#15481, ``Evaluation in the Crowd: Crowdsourcing and Human-Centred Experiments'' held in November 2015. Jason Jacques was supported by a studentship from the Engineering and Physical Sciences Research Council. Ognjen Scekic was supported by the EU FP7 SmartSociety project under grant \#600854. Michael Wybrow was supported by the Australian Research Council Discovery Project grant DP140100077. This work was partially funded by the Deutsche Forschungsgemeinschaft (DFG) under Grants HO4770/2-2 and TR257/38-2.},
  internal-note = {NOTE(review): address "Netherlands" is a country, not a city; publisher for this LNCS volume is Springer International Publishing (Cham) per the DOI prefix -- verify and correct.},
}