Updated DOIs; replaced black2022gpt arXiv preprint with its published ACL @inproceedings entry
diff --git a/paper/paper.bib b/paper/paper.bib
index 72f001a..6bce399 100644
--- a/paper/paper.bib
+++ b/paper/paper.bib
@@ -5,6 +5,7 @@
volume={4},
number={1},
pages={1--3},
+ doi = {10.1038/s41746-021-00464-x},
year={2021},
publisher={Nature Publishing Group}
}
@@ -13,6 +14,7 @@
title={Using cognitive psychology to understand GPT-3},
author={Binz, Marcel and Schulz, Eric},
journal={arXiv preprint arXiv:2206.14576},
+ doi = {10.31234/osf.io/6dfgk},
year={2022}
}
@@ -30,6 +32,7 @@
volume={115},
number={16},
pages={E3635--E3644},
+ doi = {10.1073/pnas.1720347115},
year={2018},
publisher={National Acad Sciences}
}
@@ -48,6 +51,7 @@
journal={Natural Language Engineering},
volume={27},
number={1},
+ doi = {10.1017/s1351324920000601},
pages={113--118},
year={2021},
publisher={Cambridge University Press}
@@ -69,6 +73,7 @@
volume={30},
number={4},
pages={681--694},
+ doi = {10.1007/s11023-020-09548-1},
year={2020},
publisher={Springer}
}
@@ -85,14 +90,35 @@
publisher={Nature Publishing Group}
}
-@article{black2022gpt,
- title={Gpt-neox-20b: An open-source autoregressive language model},
- author={Black, Sid and Biderman, Stella and Hallahan, Eric and Anthony, Quentin and Gao, Leo and Golding, Laurence and He, Horace and Leahy, Connor and McDonell, Kyle and Phang, Jason and others},
- journal={arXiv preprint arXiv:2204.06745},
- year={2022}
+@inproceedings{black2022gpt,
+ title = "{GPT}-{N}eo{X}-20{B}: An Open-Source Autoregressive Language Model",
+ author = "Black, Sidney and
+ Biderman, Stella and
+ Hallahan, Eric and
+ Anthony, Quentin and
+ Gao, Leo and
+ Golding, Laurence and
+ He, Horace and
+ Leahy, Connor and
+ McDonell, Kyle and
+ Phang, Jason and
+ Pieler, Michael and
+ Prashanth, Usvsn Sai and
+ Purohit, Shivanshu and
+ Reynolds, Laria and
+ Tow, Jonathan and
+ Wang, Ben and
+ Weinbach, Samuel",
+ booktitle = "Proceedings of {BigScience} Episode {\#}5 -- Workshop on Challenges {\&} Perspectives in Creating Large Language Models", month = may,
+ year = "2022",
+ address = "virtual+Dublin",
+ publisher = "Association for Computational Linguistics",
+ url = "https://aclanthology.org/2022.bigscience-1.9",
+ doi = "10.18653/v1/2022.bigscience-1.9",
+ pages = "95--136",
+ abstract = "We introduce GPT-NeoX-20B, a 20 billion parameter autoregressive language model trained on the Pile, whose weights will be made freely and openly available to the public through a permissive license. It is, to the best of our knowledge, the largest dense autoregressive model that has publicly available weights at the time of submission. In this work, we describe GPT-NeoX-20B{'}s architecture and training, and evaluate its performance. We open-source the training and evaluation code, as well as the model weights, at https://github.com/EleutherAI/gpt-neox.",
}
-
@article{miotto_who_2022,
title = {Who is {GPT}-3? {An} {Exploration} of {Personality}, {Values} and {Demographics}},
shorttitle = {Who is {GPT}-3?},
@@ -111,7 +137,8 @@
@article{shihadehbrilliance,
title={Brilliance Bias in GPT-3},
author={Shihadeh, Juliana and Ackerman, Margareta and Troske, Ashley and Lawson, Nicole and Gonzalez, Edith},
- year={2022}
+ year={2022},
+ doi={10.1109/ghtc55712.2022.9910995}
}
@article{vandermaas2021,