@article{Li01072024,
  author   = {Li, Sizhen and Moayedpour, Saeed and Li, Ruijiang and Bailey, Michael and Riahi, Saleh and Kogler-Anele, Lorenzo and Miladi, Milad and Miner, Jacob and Pertuy, Fabien and Zheng, Dinghai and Wang, Jun and Balsubramani, Akshay and Tran, Khang and Zacharia, Minnie and Wu, Monica and Gu, Xiaobo and Clinton, Ryan and Asquith, Carla and Skaleski, Joseph and Boeglin, Lianne and Chivukula, Sudha and Dias, Anusha and Strugnell, Tod and Montoya, Fernando Ulloa and Agarwal, Vikram and Bar-Joseph, Ziv and Jager, Sven},
  title    = {{CodonBERT} large language model for {mRNA} vaccines},
  journal  = {Genome Research},
  volume   = {34},
  number   = {7},
  pages    = {1027--1035},
  year     = {2024},
  doi      = {10.1101/gr.278870.123},
  url      = {http://genome.cshlp.org/content/34/7/1027.abstract},
  eprint   = {http://genome.cshlp.org/content/34/7/1027.full.pdf+html},
  abstract = {mRNA-based vaccines and therapeutics are gaining popularity and usage across a wide range of conditions. One of the critical issues when designing such mRNAs is sequence optimization. Even small proteins or peptides can be encoded by an enormously large number of mRNAs. The actual mRNA sequence can have a large impact on several properties, including expression, stability, immunogenicity, and more. To enable the selection of an optimal sequence, we developed CodonBERT, a large language model (LLM) for mRNAs. Unlike prior models, CodonBERT uses codons as inputs, which enables it to learn better representations. CodonBERT was trained using more than 10 million mRNA sequences from a diverse set of organisms. The resulting model captures important biological concepts. CodonBERT can also be extended to perform prediction tasks for various mRNA properties. CodonBERT outperforms previous mRNA prediction methods, including on a new flu vaccine data set.}
}