Commit c1929ea

add dois
1 parent 0f11fa3 commit c1929ea

File tree

1 file changed: +27 -9 lines changed

paper.bib

Lines changed: 27 additions & 9 deletions
@@ -2,21 +2,27 @@ @article{devlin2018bert
 title={BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding},
 author={Devlin, Jacob and Chang, Ming-Wei and Lee, Kenton and Toutanova, Kristina},
 journal={arXiv preprint arXiv:1810.04805},
-year={2018}
+year={2018},
+doi={10.48550/arXiv.1810.04805},
+url={https://doi.org/10.48550/arXiv.1810.04805}
 }
 
 @article{liu2019roberta,
 title={RoBERTa: A Robustly Optimized BERT Pretraining Approach},
 author={Liu, Yinhan and Ott, Myle and Goyal, Naman and Du, Jingfei and Joshi, Mandar and Chen, Danqi and Levy, Omer and Lewis, Mike and Zettlemoyer, Luke and Stoyanov, Veselin},
 journal={arXiv preprint arXiv:1907.11692},
-year={2019}
+year={2019},
+doi={10.48550/arXiv.1907.11692},
+url={https://doi.org/10.48550/arXiv.1907.11692}
 }
 
 @article{openai2023gpt4,
 title={GPT-4 Technical Report},
 author={OpenAI},
 journal={arXiv preprint arXiv:2303.08774},
-year={2023}
+year={2023},
+doi={10.48550/arXiv.2303.08774},
+url={https://doi.org/10.48550/arXiv.2303.08774}
 }
 
 @article{pedregosa2011scikit,
@@ -26,37 +32,48 @@ @article{pedregosa2011scikit
 volume={12},
 number={Oct},
 pages={2825--2830},
-year={2011}
+year={2011},
+url={http://www.jmlr.org/papers/v12/pedregosa11a.html}
 }
 
 @article{wolf2019huggingface,
 title={HuggingFace's Transformers: State-of-the-Art Natural Language Processing},
 author={Wolf, Thomas and Debut, Lysandre and Sanh, Victor and Chaumond, Julien and Delangue, Clement and Moi, Anthony and Cistac, Pierric and Rault, Tim and Louf, R{\'e}mi and Funtowicz, Morgan and others},
 journal={arXiv preprint arXiv:1910.03771},
-year={2019}
+year={2019},
+doi={10.48550/arXiv.1910.03771},
+url={https://doi.org/10.48550/arXiv.1910.03771}
 }
 
 @inproceedings{guo2017calibration,
 title={On Calibration of Modern Neural Networks},
 author={Guo, Chuan and Pleiss, Geoff and Sun, Yu and Weinberger, Kilian Q},
 booktitle={Proceedings of the 34th International Conference on Machine Learning},
-year={2017}
+series={Proceedings of Machine Learning Research},
+volume={70},
+pages={1321--1330},
+publisher={PMLR},
+year={2017},
+url={https://proceedings.mlr.press/v70/guo17a.html}
 }
 
 @inproceedings{zadrozny2002transforming,
 title={Transforming Classifier Scores into Accurate Multiclass Probability Estimates},
 author={Zadrozny, Bianca and Elkan, Charles},
 booktitle={Proceedings of the Eighth ACM SIGKDD International Conference on Knowledge Discovery and Data Mining},
 pages={694--699},
-year={2002}
+year={2002},
+publisher={Association for Computing Machinery},
+doi={10.1145/775047.775151}
 }
 
 @inproceedings{paszke2019pytorch,
 title={PyTorch: An Imperative Style, High-Performance Deep Learning Library},
 author={Paszke, Adam and Gross, Sam and Massa, Francisco and Lerer, Adam and Bradbury, James and Chanan, Gregory and Killeen, Trevor and Lin, Zeming and Gimelshein, Natalia and Antiga, Luca and others},
 booktitle={Advances in Neural Information Processing Systems},
 volume={32},
-year={2019}
+year={2019},
+url={https://proceedings.neurips.cc/paper/2019/hash/bdbca288fee7f92f2bfa9f7012727740-Abstract.html}
 }
 
 @inproceedings{zhang2015character,
@@ -65,7 +82,8 @@ @inproceedings{zhang2015character
 booktitle={Advances in Neural Information Processing Systems},
 volume={28},
 pages={649--657},
-year={2015}
+year={2015},
+url={https://papers.nips.cc/paper/2015/hash/250cf8b51c773f3f8dc8b4be867a9a02-Abstract.html}
 }
 
 @inproceedings{demszky2020goemotions,
