Publications by M. Mogessie, M. Ronchetti, and G. Riccardi: (1) "Predicting Student Progress from Peer-Assessment Data", Educational Data Mining (EDM), Raleigh, USA, 2016; (2) "Predicting Students' Final Exam Scores from their Course Activities", IEEE Frontiers in Education (FIE), El Paso, USA, 2015; (3) "A Web Based Peer Interaction Framework for Improved Assessment and Supervision of Students", EdMedia, Tampere, 2014.
@inproceedings{mogessie2016progress,
  title     = {Predicting Student Progress from Peer-Assessment Data},
  author    = {Mogessie, M. and Ronchetti, M. and Riccardi, G.},
  booktitle = {Proceedings of the 9th International Conference on Educational Data Mining ({EDM})},
  year      = {2016},
  month     = nov,
  address   = {Raleigh, USA},
  url       = {https://sisl.disi.unitn.it/wp-content/uploads/2016/11/EDM16-PredictingUserProgressFromPeers.pdf},
  abstract  = {Predicting overall student performance and monitoring progress have attracted more attention in the past five years than before. Demographic data, high school grades and test result constitute much of the data used for building prediction models. This study demonstrates how data from a peer-assessment environment can be used to build student progress prediction models. The possibility for automating tasks, coupled with minimal teacher intervention, make peer-assessment an efficient platform for gathering student activity data in a continuous manner. The performances of the prediction models are comparable with those trained using other educational data. Considering the fact that the student performance data do not include any teacher assessments, the results are more than encouraging and shall convince the reader that peer-assessment has yet another advantage to offer in the realm of automated student progress monitoring and supervision.},
  keywords  = {Education Analytics},
}
@inproceedings{mogessie2015scores,
  title     = {Predicting Students' Final Exam Scores from their Course Activities},
  author    = {Mogessie, M. and Riccardi, G. and Ronchetti, M.},
  booktitle = {Proceedings of the {IEEE} Frontiers in Education Conference ({FIE})},
  year      = {2015},
  month     = oct,
  address   = {El Paso, USA},
  url       = {https://sisl.disi.unitn.it/wp-content/uploads/2015/11/FIE15-PredictingStudentsScores.pdf},
  abstract  = {A common approach to the problem of predicting students' exam scores has been to base this prediction on the previous educational history of students. In this paper, we present a model that bases this prediction on students' performance on several tasks assigned throughout the duration of the course. In order to build our prediction model, we use data from a semi-automated peer-assessment system implemented in two undergraduate-level computer science courses, where students ask questions about topics discussed in class, answer questions from their peers, and rate answers provided by their peers. We then construct features that are used to build several multiple linear regression models. We use the Root Mean Squared Error (RMSE) of the prediction models to evaluate their performance. Our final model, which has recorded an RMSE of 2.9326 for one course and 3.4383 for another on predicting grades on a scale of 18 to 30, is built using 14 features that capture various activities of students. Our work has possible implications in the MOOC arena and in similar online course administration systems.},
  keywords  = {Education Analytics, Machine Learning},
}
@inproceedings{mogessie2014framework,
  title     = {A Web Based Peer Interaction Framework for Improved Assessment and Supervision of Students},
  author    = {Mogessie, M. and Riccardi, G. and Ronchetti, M.},
  booktitle = {Proceedings of the World Conference on Educational Multimedia, Hypermedia and Telecommunications ({EdMedia})},
  year      = {2014},
  address   = {Tampere, Finland},
  url       = {https://sisl.disi.unitn.it/wp-content/uploads/2014/11/EDMEDIA14-Peer-based-Assessment.pdf},
  abstract  = {One of the challenges of both traditional and contemporary instructional media in higher education is creating a sustainable teaching-learning environment that ensures continuous engagement of students and provides efficient means of assessing their performance. We present a peer-based framework designed to increase active participation of students in courses administered in both traditional and blended learning settings. Students are continuously engaged in attention-eliciting tasks and are assessed by their peers. The framework allows semi-automated assignment of tasks to students. In completing these tasks, students ask questions, answer questions from other students, evaluate the quality of question-answer pairs and rate answers provided by their peers. We have implemented this framework in several courses and run extensive experiments to assess the effectiveness of our approach. We discuss the results of students' surveys of this approach, which, in general, has been perceived as useful in achieving better learning outcomes.},
  keywords  = {Education Analytics, Interactive Systems},
}