<?xml version="1.0" encoding="UTF-8"?><rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:dcq="http://purl.org/dc/terms/"><records count="1" morepages="false" start="1" end="1"><record rownumber="1"><dc:product_type>Journal Article</dc:product_type><dc:title>Achieving GPT-4o level performance in astronomy with a specialized 8B-parameter large language model</dc:title><dc:creator>de Haan, Tijmen; Ting, Yuan-Sen; Ghosal, Tirthankar; Nguyen, Tuan Dung; Accomazzi, Alberto; Wells, Azton; Ramachandra, Nesar; Pan, Rui; Sun, Zechang</dc:creator><dc:corporate_author/><dc:editor/><dc:description>&lt;title&gt;Abstract&lt;/title&gt; &lt;p&gt;AstroSage-Llama-3.1-8B is a domain-specialized natural-language AI assistant tailored for research in astronomy, astrophysics, cosmology, and astronomical instrumentation. Trained on the complete collection of astronomy-related arXiv papers from 2007 to 2024 along with millions of synthetically-generated question-answer pairs and other astronomical literature, AstroSage-Llama-3.1-8B demonstrates remarkable proficiency on a wide range of questions. AstroSage-Llama-3.1-8B scores 80.9% on the AstroMLab-1 benchmark, greatly outperforming all models—proprietary and open-weight—in the 8-billion parameter class, and performing on par with GPT-4o. This achievement demonstrates the potential of domain specialization in AI, suggesting that focused training can yield capabilities exceeding those of much larger, general-purpose models. 
AstroSage-Llama-3.1-8B is freely available, enabling widespread access to advanced AI capabilities for astronomical education and research.&lt;/p&gt;</dc:description><dc:publisher>Scientific Reports</dc:publisher><dc:date>2025-12-01</dc:date><dc:nsf_par_id>10612805</dc:nsf_par_id><dc:journal_name>Scientific Reports</dc:journal_name><dc:journal_volume>15</dc:journal_volume><dc:journal_issue>1</dc:journal_issue><dc:page_range_or_elocation/><dc:issn>2045-2322</dc:issn><dc:isbn/><dc:doi>https://doi.org/10.1038/s41598-025-97131-y</dc:doi><dcq:identifierAwardId>2406729</dcq:identifierAwardId><dc:subject/><dc:version_number/><dc:location/><dc:rights/><dc:institution/><dc:sponsoring_org>National Science Foundation</dc:sponsoring_org></record></records></rdf:RDF>