
Commit

Update papers.bib
IvyWang845 authored Jan 26, 2024
1 parent 6ac7da5 commit 4615344
Showing 1 changed file with 2 additions and 2 deletions.
4 changes: 2 additions & 2 deletions _bibliography/papers.bib
@@ -130,7 +130,7 @@ @article{Cherish2023preprint
Conclusion: Results provide promising avenues for the assessment of malleable aspects of academic language in writing}
}

-@unpublished{Wang2024ontology,
+@article{Wang2024ontology,
title = {Extending a Pretrained Language Model (BERT) using an Ontological Perspective to Classify Students’ Scientific Expertise Level from
Written Responses},
author = {Wang, Heqiao and Haudek, Kevin and Manzanares, Amanda and Romulo, Chelsie and Royse, Emily},
@@ -139,7 +139,7 @@ @unpublished{Wang2024ontology
category = {Assessment & Measurement},
preview = {ontology.jpg},
selected={true},
-note={preprint on webpage at \url{https://www.researchsquare.com/article/rs-3879583/v1},
+note={preprint on webpage at \url{https://www.researchsquare.com/article/rs-3879583/v1}},
abstract = {The complex and interdisciplinary nature of scientific concepts presents formidable challenges for students in developing their knowledge-in-use skills. The utilization of computerized analysis for evaluating students’ contextualized constructed responses offers a potential avenue for educators to develop personalized and scalable interventions, thus supporting the teaching and learning of science consistent with contemporary calls. While prior research in artificial intelligence has demonstrated the effectiveness of algorithms, including Bidirectional Encoder Representations from Transformers (BERT), in tasks like automated classifications of constructed responses, these efforts have predominantly leaned towards text-level features, often overlooking the exploration of conceptual ideas embedded in students’ responses from a cognitive perspective. Despite BERT’s performance in downstream tasks, challenges may arise in domain-specific tasks, particularly in establishing knowledge connections between specialized and open domains. These challenges become pronounced in small-scale and imbalanced educational datasets, where the available information for fine-tuning is frequently inadequate to capture task-specific nuances and contextual details. The primary objective of the present study is to investigate the effectiveness of the established industrial standard pretrained language model BERT, when integrated with an ontological framework aligned with our science assessment, in classifying students’ expertise levels in scientific explanation. Our findings indicate that while pretrained language models such as BERT contribute to enhanced performance in language-related tasks within educational contexts, the incorporation of identifying domain-specific terms and extracting and substituting with their associated sibling terms in sentences through ontology-based systems can further improve classification model performance. Further, we qualitatively examined student responses and found that, as expected, the ontology framework identified and substituted key domain-specific terms in student responses that led to more accurate predictive scores. The study explores the practical implementation of ontology in classrooms to facilitate formative assessment and formulate instructional strategies.}
}
