 ---
 ---
 
+@article{ellington2025virtual,
+  title = {Virtual Screening on Cellular Systems with Contextualized Networks},
+  journal = {Machine Learning for Computational Biology (MLCB)},
+  author = {Ellington, Caleb and Addagudi, Sohan and Wang, Jiaqi and Lengerich, Benjamin and Xing, Eric P.},
+  abstract = {Virtual screening has traditionally focused on molecular targets, often failing to anticipate the complex, system-level failures that arise during clinical trials.
+  To address this, we propose a framework for virtual screening against entire cellular systems.
+  Our approach uses contextualized modeling, a multi-task learning method that infers context-specific network models, to estimate perturbation-specific coexpression networks from large-scale screening datasets, enabling accurate prediction of network restructuring under diverse cellular and therapeutic contexts.
+  We demonstrate that context-adaptive models outperform both population baselines and baselines learned for specific cell type $\times$ perturbation combinations when predicting transcriptional responses and network changes.
+  At test time, contextualized networks generate accurate models of gene network reorganization on demand for completely unseen cell types and therapies.
+  Across multiple independent runs, networks provide a standard, cohesive, and constrained latent space for comparing therapeutic effects across perturbation modalities (knockout, overexpression, small molecule).
+  Comparing perturbations in terms of cell-level effects yields a principled approach to drug repurposing, safety profiling, and interpreting mechanism of action.
+  This work advances a systems-level foundation for \textit{in silico} preclinical screening, promising a new approach for mapping potential therapies to rare and heterogeneous diseases.
+  },
+  year = {2025},
+  bibtex_show = {true},
+}
+
 @article{liu2025mka,
   title = {MKA: Memory-Keyed Attention for Efficient Long-Context Reasoning},
   journal = {ICML Long Context Foundation Models (LCFM)},
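To make the added abstract's core idea concrete, here is a minimal, self-contained sketch of contextualized network modeling: a linear context encoder maps each sample's context vector to the weights of a sample-specific coexpression (self-regression) network. This is an illustrative NumPy toy under stated assumptions, not the authors' implementation; the architecture, all variable names, and the synthetic data are assumptions made for the example.

import numpy as np

rng = np.random.default_rng(0)

n, g, k = 500, 6, 3          # samples, genes, context features
C = rng.normal(size=(n, k))  # per-sample context (e.g., cell type / perturbation encoding)
X = rng.normal(size=(n, g))  # expression profiles (synthetic toy data)

mask = 1.0 - np.eye(g)       # forbid self-edges in the coexpression model

# Linear context encoder: vec(W) = A @ c + b, so every sample gets
# its own network W(c) instead of one shared population network.
A = np.zeros((g * g, k))
b = np.zeros(g * g)

lr = 1e-3
for _ in range(200):
    grad_A = np.zeros_like(A)
    grad_b = np.zeros_like(b)
    for c, x in zip(C, X):
        W = (A @ c + b).reshape(g, g) * mask  # context-specific network
        r = x @ W - x                         # reconstruction residual
        gW = np.outer(x, r) * mask            # dL/dW for L = ||x @ W - x||^2 / 2
        grad_A += np.outer(gW.ravel(), c)     # chain rule through vec(W) = A c + b
        grad_b += gW.ravel()
    A -= lr * grad_A / n
    b -= lr * grad_b / n

# Test time: a network for a completely unseen context, generated on demand.
c_new = rng.normal(size=k)
W_new = (A @ c_new + b).reshape(g, g) * mask
print(np.round(W_new, 2))

The property the abstract leans on is visible in the last three lines: once the encoder is trained, a network for an unseen context comes from a single forward pass, with no per-context refitting.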