
Commit d67e8eb

overhaul resume
1 parent b671ec0 commit d67e8eb

File tree

3 files changed: +40, -31 lines

resume/ags.bib

Lines changed: 11 additions & 2 deletions
@@ -125,11 +125,13 @@ @inproceedings{sorokin.RTE_DeepONet
 url = {https://openreview.net/forum?id=SHidR8UMKo},
 }

-@book{bacho.CHONKNORIS.tmp,
+@article{bacho.CHONKNORIS,
 title = {Operator learning at machine precision},
 author = {Aras Bacho and Aleksei G. Sorokin and Xianjin Yang and Th\'{e}o Bourdais and Edoardo Calvello and Matthieu Darcy and Alexander Hsu and Bamdad Hosseini and Houman Owhadi},
 year = {2025},
-note = {In preparation},
+journal = {ArXiv preprint},
+volume = {abs/2511.19980},
+url = {https://arxiv.org/abs/2511.19980},
 }

 @article{gjergo2025massive,
@@ -190,3 +192,10 @@ @inproceedings{sorokin.fastgps_probnum25
 editor = {Kanagawa, Motonobu and Cockayne, Jon and Gessner, Alexandra and Hennig, Philipp},
 pdf = {https://raw.githubusercontent.com/mlresearch/v271/main/assets/sorokin25a/sorokin25a.pdf},
 }
+
+@phdthesis{sorokin.thesis,
+title = {Algorithms and scientific software for quasi-{M}onte {C}arlo, fast {G}aussian process regression, and scientific machine learning},
+author = {Aleksei G. Sorokin},
+year = {2025},
+school = {Illinois Institute of Technology},
+}

resume/sorokin_resume.pdf

2.08 KB
Binary file not shown.

resume/sorokin_resume.tex

Lines changed: 29 additions & 29 deletions
@@ -61,32 +61,38 @@ \subsection{Education}
 \newentry{\normalfont{2017.08 - 2021.05}}{\textbf{B.S. in Applied Math, Minor in Computer Science.} IIT. Summa Cum Laude. GPA $3.94 / 4$.}

 \subsection{Experiences}
-\newentry{\normalfont{2025.01 - 2025.12}}{\textbf{DOE SCGSR Fellow in Applied Mathematics} at \textbf{Sandia National Laboratory} in Livermore, CA. I developed Gaussian process based scientific ML models for machine precision solutions to nonlinear PDEs. I built fast, scalable multitask Gaussian processes for multi-fidelity modeling. Both projects produced publications and open-source software with HPC support.}
-\newentry{\normalfont{2024.05 - 2024.08}}{\textbf{Scientific Machine Learning Researcher} at \textbf{FM (Factory Mutual Insurance Company).} I built scientific ML models, including Physics Informed Neural Networks (PINNs) and Deep Operator Networks (DeepONets), for solving Radiative Transport Equations (RTEs) used to speed up CFD fire dynamics simulations. Resulted in publication of \citetitle{sorokin.RTE_DeepONet}.}
-\newentry{\normalfont{2023.05 - 2023.08}}{\textbf{Graduate Intern} at \textbf{Los Alamos National Laboratory.} I modeled the solution processes of PDEs with random coefficients using efficient and error aware Gaussian processes. Resulted in publication of \citetitle{sorokin.gp4darcy}.}
-\newentry{\normalfont{2022.05 - 2022.08}}{\textbf{Givens Associate Intern} at \textbf{Argonne National Laboratory}. I researched methods to efficiently estimate failure probability using Monte Carlo with non-parametric importance sampling. Resulted in publication of \citetitle{sorokin.adaptive_prob_failure_GP}.}
-\newentry{\normalfont{2021.05 - 2021.08}}{\textbf{ML Engineer Intern} at \textbf{SigOpt, an Intel Company}. I developed novel meta-learning techniques for model-aware hyperparameter tuning via Bayesian optimization. In a six-person ML engineering team, I contributed production code and learned key elements of the AWS stack. Resulted in publication of \citetitle{sorokin.sigopt_mulch}.}
+\newentry{\normalfont{2025.01 - 2025.12}}{\textbf{DOE SCGSR Fellow in Applied Math} at \textbf{Sandia National Laboratory} in Livermore, CA. I produced scientific ML models for machine-precision solutions to nonlinear PDEs \cite{bacho.CHONKNORIS}. I developed scalable multi-fidelity Gaussian process regression models and open-source software implementations \cite{sorokin.FastBayesianMLQMC,sorokin.fastgps_probnum25}.}
+\newentry{\normalfont{2024.05 - 2024.08}}{\textbf{Scientific Machine Learning Researcher} at \textbf{FM (Factory Mutual Insurance Company).} I deployed scientific ML models, including PINNs and DeepONets, to accelerate CFD fire dynamics simulations \cite{sorokin.RTE_DeepONet}.}
+\newentry{\normalfont{2023.05 - 2023.08}}{\textbf{Graduate Intern} at \textbf{Los Alamos National Laboratory.} I modeled multi-fidelity solutions to PDEs with random coefficients using efficient, error-aware Gaussian process regression models \cite{sorokin.gp4darcy}.}
+\newentry{\normalfont{2022.05 - 2022.08}}{\textbf{Givens Associate Intern} at \textbf{Argonne National Laboratory}. I derived error bounds and proposed a sequential sampling method for efficiently estimating failure probabilities with probabilistic models \cite{sorokin.adaptive_prob_failure_GP}.}
+\newentry{\normalfont{2021.05 - 2021.08}}{\textbf{ML Engineer Intern} at \textbf{SigOpt, an Intel Company}. In a six-person ML team, I contributed production code implementing meta-learning for model-aware hyperparameter tuning via Bayesian optimization \cite{sorokin.sigopt_mulch}.}
 \newentry{\normalfont{2021.08 - 2025.01}}{\textbf{Teaching Assistant} at \textbf{IIT}. I led reviews for PhD qualifying exams in analysis and computational math.}
-% \newentry{2018 - 2021}{\textbf{Lead Developer} of \textbf{DNNB: The Deep Neural Network Builder in Python.} This research package implements deep learning models from scratch in Python. See \itlink{github.com/alegresor/DNNB}{https://github.com/alegresor/DNNB}.}
-% \newentry{2018 - Present}{\textbf{Administrative Assistant} for \textbf{The Center for Interdisciplinary Scientific Computation at IIT}. I scheduled lecture series and maintained information on the CISC website at \itlink{cos.iit.edu/cisc/}{https://cos.iit.edu/cisc/}.}
-% \newentry{2018 - 2019}{\textbf{Instructor} for the \textbf{STARS Computing Corp's Computer Discover Program.} I developed a curriculum for middle school and high school girls to learn programmatic thinking with Python.}
-
+\newentry{\normalfont{2018.05 - 2019.08}}{\textbf{Instructor} for the \textbf{STARS Computing Corps' Computer Discover Program.} I taught and developed a curriculum for middle school and high school girls to learn programmatic thinking in Python.}
+\newentry{\normalfont{2022.09 - 2022.11}}{\textbf{Participant} in \textbf{Argonne National Laboratory's Course on AI Driven Science on Supercomputers}. Key topics included handling large scale data pipelines and parallel training for neural networks.} %\itlink{github.com/alegresor/ai-science-training-series}{https://github.com/alegresor/ai-science-training-series}.
 \subsection{Open-Source Software}
-\newentry{\texttt{QMCPy}}{\textbf{Quasi-Monte Carlo Python Software} (\href{https://qmcsoftware.github.io/QMCSoftware}{qmcsoftware.github.io/QMCSoftware}), lead developer. This package provides high quality quasi-random sequence generators, automatic variable transformations, adaptive stopping criteria algorithms, and diverse use cases. Over the past five years, this project has grown to dozens of collaborators and multiple publications \cite{sorokin.2025.ld_randomizations_ho_nets_fast_kernel_mats,choi.challenges_great_qmc_software,choi.QMC_software,sorokin.MC_vector_functions_integrals,sorokin.QMC_IS_QMCPy,hickernell.qmc_what_why_how,jain.bernstein_betting_confidence_intervals}.}
-\newentry{\texttt{FastGPs}}{\textbf{Scalable Gaussian Process Regression in Python} (\href{https://alegresor.github.io/fastgps}{alegresor.github.io/fastgps}). Gaussian process (GP) regression models typically require $\mathcal{O}(n^2)$ storage and $\mathcal{O}(n^3)$ computations. \texttt{FastGPs} implements GPs which requires only $\mathcal{O}(n)$ storage and $\mathcal{O}(n \log n)$ computations by pairing certain quasi-random sampling locations with matching kernels to yield structured Gram matrices. We support GPU scaling, batched inference, robust hyperparameter optimization, and multitask GPs.}
-\newentry{\texttt{QMCGenerators}}{\textbf{Quasi-Random Sequence Generators in Julia} (\href{https://alegresor.github.io/QMCGenerators.jl}{{alegresor.github.io/QMCGenerators.jl}}). This package includes routines to generate and randomize quasi-random sequences used in Quasi-Monte Carlo. Supported low discrepancy sequences include lattices with random shifts and digital nets (e.g. Sobol' points) with random digital shifts, linear matrix scrambling, nested uniform scrambling, and higher order construction through digital interlacing. These features are also supported in \texttt{QMCPy}.}
-\newentry{\texttt{AI on HPC}}{\textbf{AI Driven Science on Supercomputers Course} at \textbf{Argonne National Laboratory}.}
-%Key topics included handling large scale data pipelines and parallel training for neural networks.} %\itlink{github.com/alegresor/ai-science-training-series}{https://github.com/alegresor/ai-science-training-series}.
+
+\newentry{\texttt{QMCPy}}{\textbf{Quasi-Monte Carlo Python Software} (\href{https://qmcsoftware.github.io/QMCSoftware}{qmcsoftware.github.io/QMCSoftware}). I led dozens of collaborators across academia and industry to develop QMC sequence generators, automatic variable transformations, adaptive error estimation algorithms, and diverse use cases \cite{sorokin.thesis,sorokin.2025.ld_randomizations_ho_nets_fast_kernel_mats,choi.challenges_great_qmc_software,choi.QMC_software,sorokin.MC_vector_functions_integrals,sorokin.QMC_IS_QMCPy,hickernell.qmc_what_why_how,jain.bernstein_betting_confidence_intervals}.}
+\newentry{\texttt{FastGPs}}{\textbf{Scalable Gaussian Process Regression in Python} (\href{https://alegresor.github.io/fastgps}{alegresor.github.io/fastgps}). The package supports GPU scaling, batched inference, robust hyperparameter optimization, multi-fidelity GPs, and efficient Bayesian cubature. \texttt{FastGPs} is the first package to implement GPs requiring only $\mathcal{O}(n)$ storage and $\mathcal{O}(n \log n)$ computations, compared to the typical $\mathcal{O}(n^2)$ storage and $\mathcal{O}(n^3)$ computation requirements.}
+\newentry{\scalebox{.9}{\texttt{QMCGenerators.jl}}}{\textbf{Randomized Quasi-Monte Carlo Sequences in Julia} (\href{https://alegresor.github.io/QMCGenerators.jl}{alegresor.github.io/QMCGenerators.jl}).}
+\newentry{\texttt{QMCToolsCL}}{\textbf{Randomized Quasi-Monte Carlo Sequences in C / OpenCL} (\href{https://qmcsoftware.github.io/QMCToolsCL/}{qmcsoftware.github.io/QMCToolsCL/}).}
+\newentry{\scalebox{.95}{\texttt{TorchOrthoPolys}}}{\textbf{Orthogonal Polynomials in PyTorch} (\href{https://alegresor.github.io/TorchOrthoPolys/}{alegresor.github.io/TorchOrthoPolys/}) with GPU support.}

 \subsection{Awards}
-% \newentry{\normalfont{2025}}{\textbf{DOE SCGSR Fellow in Applied Mathematics}, Sandia National Laboratory, Livermore California.}
-\newentry{\normalfont{2025}}{\textbf{Karl Menger Student Award for Exceptional Scholarship (Graduate)}, IIT.}
-\newentry{\normalfont{2024}}{\textbf{College of Computing Excellence in Dissertation Research}, IIT.}
+\newentry{\normalfont{2025.01 - 2025.12}}{\textbf{DOE SCGSR Fellow in Applied Math}, Sandia National Laboratory, Livermore, California.}
+\newentry{\normalfont{2025.01}}{\textbf{Karl Menger Student Award for Exceptional Scholarship (Graduate)}, IIT.}
+\newentry{\normalfont{2024.01}}{\textbf{College of Computing Excellence in Dissertation Research}, IIT.}
 \newentry{\normalfont{2024}}{\textbf{Teaching Assistant Award}, IIT.}
-\newentry{\normalfont{2023}}{\textbf{Outstanding Math Poster}, Los Alamos National Laboratory.}
-%\newentry{\normalfont{2021}}{\textbf{Best Manuscript}, IIT Undergraduate Research Journal.}
-%\newentry{\normalfont{2020}}{\textbf{Karl Menger Student Award for Exceptional Scholarship}, IIT.}
-%\newentry{\normalfont{2017 - Present}}{\textbf{Deans List Member}, IIT.}
+\newentry{\normalfont{2023.08}}{\textbf{Outstanding Math Poster}, Los Alamos National Laboratory.}
+% \newentry{\normalfont{2021}}{\textbf{Best Manuscript}, IIT Undergraduate Research Journal.}
+% \newentry{\normalfont{2020}}{\textbf{Karl Menger Student Award for Exceptional Scholarship (Undergraduate)}, IIT.}
+\newentry{\normalfont{2017.08 - 2025.05}}{\textbf{Dean's List Member}, IIT, every semester.}
+
+\subsection{References}
+\newentry{\normalfont{PhD Advisor}}{\textbf{Fred J. Hickernell} (\href{mailto:[email protected]}{[email protected]}), Vice Provost for Research and Professor of Applied Math, IIT.}
+\newentry{\normalfont{Mentor}}{\textbf{Nicolas W. Hengartner} (\href{mailto:[email protected]}{[email protected]}), Senior Scientist, Los Alamos National Lab.}
+\newentry{\normalfont{Mentor}}{\textbf{Michael J. McCourt} (\href{mailto:[email protected]}{[email protected]}), CTO and Co-Founder at Distributional.}
+\newentry{\normalfont{Mentor}}{\textbf{Pieterjan M. Robbe} (\href{mailto:[email protected]}{[email protected]}), Senior Member of Technical Staff, Sandia National Lab.}
+% \newentry{\normalfont{Mentor}}{\textbf{Vishwas Rao, PhD} (\href{mailto:[email protected]}{[email protected]}) Assistant Computational Mathematician, Argonne National Lab.}

 % \subsection{Coursework}
 % \newentry{Math}{
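
Note on the FastGPs entry above: the new resume line states the complexity claim, while the replaced description gives the mechanism, pairing certain quasi-random sampling locations with matching kernels to yield structured Gram matrices. Below is a minimal Python sketch of that idea in the simplest setting, assuming one-dimensional lattice points x_i = i/n and a shift-invariant kernel, for which the Gram matrix is circulant and the FFT solves the linear system in O(n log n). This is illustrative only, not the FastGPs API.

import numpy as np

n = 256
x = np.arange(n) / n                               # 1D lattice points in [0, 1)
k = lambda t: 1 + 2 * np.pi**2 * (t**2 - t + 1/6)  # shift-invariant Bernoulli kernel, positive definite
y = np.sin(2 * np.pi * x)                          # toy observations

c = k(x)                                           # first column of the circulant Gram matrix, O(n) storage
coef = np.fft.ifft(np.fft.fft(y) / np.fft.fft(c)).real  # O(n log n) solve of K @ coef = y via the FFT

K = k(np.abs(x[:, None] - x[None, :]))             # dense Gram matrix, built only to verify; k(t) = k(1 - t)
print(np.allclose(K @ coef, y))                    # True: matches the O(n^3) dense solve

The same pairing generalizes: rank-1 lattices with shift-invariant kernels keep this FFT structure in higher dimensions, and digital nets with digitally shift-invariant kernels play the analogous role via fast Walsh-Hadamard transforms.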
@@ -113,13 +119,6 @@ \subsection{Awards}
 % Data Structures and Algorithms,
 % Object Oriented Programming I/II.}

-
-% \subsection{References}
-% \newentry{\href{mailto:[email protected]}{[email protected]}}{\textbf{Fred J. Hickernell, PhD} Vice Provost for Research and Professor of Applied Math, IIT.}
-% \newentry{\href{mailto:[email protected]}{[email protected]}}{\textbf{Nicolas W. Hengartner, PhD} Senior Scientist, Los Alamos National Laboratory.}
-% \newentry{\href{mailto:[email protected]}{[email protected]}}{\textbf{Michael J. McCourt, PhD} Co-Founder and CTO at Distributional.}
-% \newentry{\href{mailto:[email protected]}{[email protected]}}{\textbf{Vishwas Rao, PhD} Assistant Computational Mathematician, Argonne National Laboratory.}
-
 %%%%% COVER LETTER
 %\clearpage
 %\recipient{HR Department}{Corporation\\123 Pleasant Lane\\12345 City, State} % Letter recipient
@@ -132,6 +131,7 @@ \subsection{Awards}
 %\makeletterclosing % Print letter signature

 \nocite{
+sorokin.thesis,
 sorokin.fastgps_probnum25,
 hickernell.qmc_what_why_how,
 sorokin.MC_vector_functions_integrals,
@@ -148,7 +148,7 @@ \subsection{Awards}
 sorokin.adaptive_prob_failure_GP,
 jain.bernstein_betting_confidence_intervals,
 sorokin.FastBayesianMLQMC,
-% bacho.CHONKNORIS.tmp,
+bacho.CHONKNORIS,
 }

 \printbibliography[title={Publications}]
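
Note on the QMC software entries above: they center on randomized low-discrepancy sequences and replication-based error estimation. Below is a minimal Python sketch of the core mechanism, assuming a toy rank-1 lattice with independent uniform random shifts, where replicated estimates give an unbiased mean and a standard error. This is illustrative only, not the QMCPy or QMCGenerators.jl API, and the generating vector is a placeholder rather than a tuned choice.

import numpy as np

rng = np.random.default_rng(7)
n, d, reps = 2**10, 4, 16
g = np.array([1, 433, 155, 719])            # toy generating vector, not an optimized one
base = (np.arange(n)[:, None] * g % n) / n  # unshifted rank-1 lattice in [0, 1)^d

f = lambda x: np.cos(x.sum(axis=1))         # test integrand on the unit cube

# each uniform random shift (mod 1) preserves the lattice structure and makes
# the sample mean unbiased, so the spread across replications is an honest error estimate
est = np.array([f((base + rng.random(d)) % 1).mean() for _ in range(reps)])
print(est.mean(), est.std(ddof=1) / np.sqrt(reps))  # RQMC estimate and its standard error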
