You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
Copy file name to clipboardExpand all lines: docs/protein-optimization/references.bib
+17Lines changed: 17 additions & 0 deletions
Original file line number
Diff line number
Diff line change
@@ -99,3 +99,20 @@ @ARTICLE{pymoo
99
99
number={},
100
100
pages={89497-89509},
101
101
}
102
+
103
+
@article{Shahriari:BOReview:2016,
  title        = {Taking the Human Out of the Loop: A Review of {Bayesian} Optimization},
  author       = {Shahriari, Bobak and Swersky, Kevin and Wang, Ziyu and Adams, Ryan P. and De Freitas, Nando},
  journal      = {Proceedings of the IEEE},
  volume       = {104},
  number       = {1},
  pages        = {148--175},
  year         = {2016},
  month        = jan,
  issn         = {0018-9219, 1558-2256},
  doi          = {10.1109/JPROC.2015.2494218},
  language     = {en},
  abstractNote = {Big data applications are typically associated with systems involving large numbers of users, massive complex software systems, and large-scale heterogeneous computing and storage architectures. The construction of such systems involves many distributed design choices. The end products (e.g., recommendation systems, medical analysis tools, real-time game engines, speech recognizers) thus involves many tunable configuration parameters. These parameters are often specified and hard-coded into the software by various developers or teams. If optimized jointly, these parameters can result in significant improvements. Bayesian optimization is a powerful tool for the joint optimization of design choices that is gaining great popularity in recent years. It promises greater automation so as to increase both product quality and human productivity. This review paper introduces Bayesian optimization, highlights some of its methodological aspects, and showcases a wide range of applications.},
}
104
+
105
+
@inproceedings{Kirschner:LineBO:2019,
  title        = {Adaptive and Safe {Bayesian} Optimization in High Dimensions via One-Dimensional Subspaces},
  author       = {Kirschner, Johannes and Mutny, Mojmir and Hiller, Nicole and Ischebeck, Rasmus and Krause, Andreas},
  booktitle    = {Proceedings of the 36th International Conference on Machine Learning},
  publisher    = {PMLR},
  year         = {2019},
  month        = may,
  pages        = {3429--3438},
  issn         = {2640-3498},
  url          = {https://proceedings.mlr.press/v97/kirschner19a.html},
  language     = {en},
  abstractNote = {Bayesian optimization is known to be difficult to scale to high dimensions, because the acquisition step requires solving a non-convex optimization problem in the same search space. In order to scale the method and keep its benefits, we propose an algorithm (LineBO) that restricts the problem to a sequence of iteratively chosen one-dimensional sub-problems that can be solved efficiently. We show that our algorithm converges globally and obtains a fast local rate when the function is strongly convex. Further, if the objective has an invariant subspace, our method automatically adapts to the effective dimension without changing the algorithm. When combined with the SafeOpt algorithm to solve the sub-problems, we obtain the first safe Bayesian optimization algorithm with theoretical guarantees applicable in high-dimensional settings. We evaluate our method on multiple synthetic benchmarks, where we obtain competitive performance. Further, we deploy our algorithm to optimize the beam intensity of the Swiss Free Electron Laser with up to 40 parameters while satisfying safe operation constraints.},
}
106
+
107
+
108
+
@misc{Balandat:botorch:2020,
  title        = {{BoTorch}: A Framework for Efficient {Monte-Carlo} {Bayesian} Optimization},
  author       = {Balandat, Maximilian and Karrer, Brian and Jiang, Daniel R. and Daulton, Samuel and Letham, Benjamin and Wilson, Andrew Gordon and Bakshy, Eytan},
  year         = {2020},
  month        = dec,
  eprint       = {1910.06403},
  eprinttype   = {arXiv},
  url          = {http://arxiv.org/abs/1910.06403},
  note         = {arXiv:1910.06403 [cs, math, stat]},
  abstractNote = {Bayesian optimization provides sample-efficient global optimization for a broad range of applications, including automatic machine learning, engineering, physics, and experimental design. We introduce BoTorch, a modern programming framework for Bayesian optimization that combines Monte-Carlo (MC) acquisition functions, a novel sample average approximation optimization approach, auto-differentiation, and variance reduction techniques. BoTorch’s modular design facilitates flexible specification and optimization of probabilistic models written in PyTorch, simplifying implementation of new acquisition functions. Our approach is backed by novel theoretical convergence results and made practical by a distinctive algorithmic foundation that leverages fast predictive distributions, hardware acceleration, and deterministic optimization. We also propose a novel “one-shot” formulation of the Knowledge Gradient, enabled by a combination of our theoretical and software contributions. In experiments, we demonstrate the improved sample efficiency of BoTorch relative to other popular libraries.},
}
109
+
110
+
@inproceedings{gardner:gpytorch:2018,
  title     = {{GPyTorch}: Blackbox Matrix-Matrix {Gaussian} Process Inference with {GPU} Acceleration},
  author    = {Gardner, Jacob R and Pleiss, Geoff and Bindel, David and Weinberger, Kilian Q and Wilson, Andrew Gordon},
  booktitle = {Advances in Neural Information Processing Systems},
  year      = {2018},
}
116
+
117
+
@article{GomezBombarelli:VAEsAndOpt:2018,
  title        = {Automatic Chemical Design Using a Data-Driven Continuous Representation of Molecules},
  author       = {Gómez-Bombarelli, Rafael and Wei, Jennifer N. and Duvenaud, David and Hernández-Lobato, José Miguel and Sánchez-Lengeling, Benjamín and Sheberla, Dennis and Aguilera-Iparraguirre, Jorge and Hirzel, Timothy D. and Adams, Ryan P. and Aspuru-Guzik, Alán},
  journal      = {ACS Central Science},
  publisher    = {American Chemical Society},
  volume       = {4},
  number       = {2},
  pages        = {268--276},
  year         = {2018},
  month        = feb,
  issn         = {2374-7943},
  doi          = {10.1021/acscentsci.7b00572},
  abstractNote = {We report a method to convert discrete representations of molecules to and from a multidimensional continuous representation. This model allows us to generate new molecules for efficient exploration and optimization through open-ended spaces of chemical compounds. A deep neural network was trained on hundreds of thousands of existing chemical structures to construct three coupled functions: an encoder, a decoder, and a predictor. The encoder converts the discrete representation of a molecule into a real-valued continuous vector, and the decoder converts these continuous vectors back to discrete molecular representations. The predictor estimates chemical properties from the latent continuous vector representation of the molecule. Continuous representations of molecules allow us to automatically generate novel chemical structures by performing simple operations in the latent space, such as decoding random vectors, perturbing known chemical structures, or interpolating between molecules. Continuous representations also allow the use of powerful gradient-based optimization to efficiently guide the search for optimized functional compounds. We demonstrate our method in the domain of drug-like molecules and also in a set of molecules with fewer that nine heavy atoms.},
}
0 commit comments