@article{korngiebel2021considering,
title={Considering the possibilities and pitfalls of Generative Pre-trained Transformer 3 (GPT-3) in healthcare delivery},
author={Korngiebel, Diane M and Mooney, Sean D},
journal={npj Digital Medicine},
volume={4},
number={1},
pages={1--3},
doi={10.1038/s41746-021-00464-x},
year={2021},
publisher={Nature Publishing Group}
}
@article{binz2022using,
title={Using cognitive psychology to understand GPT-3},
author={Binz, Marcel and Schulz, Eric},
journal={arXiv preprint arXiv:2206.14576},
doi={10.48550/arXiv.2206.14576},
year={2022}
}
@article{stevenson2022putting,
title={Putting GPT-3's Creativity to the (Alternative Uses) Test},
author={Stevenson, Claire and Smal, Iris and Baas, Matthijs and Grasman, Raoul and van der Maas, Han},
journal={arXiv preprint arXiv:2206.08932},
doi={10.48550/arXiv.2206.08932},
year={2022}
}
@article{garg2018word,
title={Word embeddings quantify 100 years of gender and ethnic stereotypes},
author={Garg, Nikhil and Schiebinger, Londa and Jurafsky, Dan and Zou, James},
journal={Proceedings of the National Academy of Sciences},
volume={115},
number={16},
pages={E3635--E3644},
doi={10.1073/pnas.1720347115},
year={2018},
publisher={National Academy of Sciences}
}
@inproceedings{bender2021dangers,
title={On the Dangers of Stochastic Parrots: Can Language Models Be Too Big?},
author={Bender, Emily M and Gebru, Timnit and McMillan-Major, Angelina and Shmitchell, Shmargaret},
booktitle={Proceedings of the 2021 ACM Conference on Fairness, Accountability, and Transparency},
pages={610--623},
doi={10.1145/3442188.3445922},
year={2021},
publisher={Association for Computing Machinery}
}
@article{dale2021gpt,
title={GPT-3: What’s it good for?},
author={Dale, Robert},
journal={Natural Language Engineering},
volume={27},
number={1},
doi={10.1017/S1351324920000601},
pages={113--118},
year={2021},
publisher={Cambridge University Press}
}
@article{brown2020language,
title={Language models are few-shot learners},
author={Brown, Tom and Mann, Benjamin and Ryder, Nick and Subbiah, Melanie and Kaplan, Jared D and Dhariwal, Prafulla and Neelakantan, Arvind and Shyam, Pranav and Sastry, Girish and Askell, Amanda and others},
journal={Advances in Neural Information Processing Systems},
volume={33},
pages={1877--1901},
year={2020}
}
@article{floridi2020gpt,
title={GPT-3: Its nature, scope, limits, and consequences},
author={Floridi, Luciano and Chiriatti, Massimo},
journal={Minds and Machines},
volume={30},
number={4},
pages={681--694},
doi={10.1007/s11023-020-09548-1},
year={2020},
publisher={Springer}
}
@article{rahwan2019machine,
title={Machine behaviour},
author={Rahwan, Iyad and Cebrian, Manuel and Obradovich, Nick and Bongard, Josh and Bonnefon, Jean-Fran{\c{c}}ois and Breazeal, Cynthia and Crandall, Jacob W and Christakis, Nicholas A and Couzin, Iain D and Jackson, Matthew O and others},
journal={Nature},
volume={568},
number={7753},
pages={477--486},
doi={10.1038/s41586-019-1138-y},
year={2019},
publisher={Nature Publishing Group}
}
@inproceedings{black2022gpt,
title={{GPT}-{N}eo{X}-20{B}: An Open-Source Autoregressive Language Model},
author={Black, Sidney and Biderman, Stella and Hallahan, Eric and Anthony, Quentin and Gao, Leo and Golding, Laurence and He, Horace and Leahy, Connor and McDonell, Kyle and Phang, Jason and Pieler, Michael and Prashanth, Usvsn Sai and Purohit, Shivanshu and Reynolds, Laria and Tow, Jonathan and Wang, Ben and Weinbach, Samuel},
booktitle={Proceedings of BigScience Episode \#5 -- Workshop on Challenges \& Perspectives in Creating Large Language Models},
month=may,
year={2022},
address={virtual+Dublin},
publisher={Association for Computational Linguistics},
url={https://aclanthology.org/2022.bigscience-1.9},
doi={10.18653/v1/2022.bigscience-1.9},
pages={95--136},
abstract={We introduce GPT-NeoX-20B, a 20 billion parameter autoregressive language model trained on the Pile, whose weights will be made freely and openly available to the public through a permissive license. It is, to the best of our knowledge, the largest dense autoregressive model that has publicly available weights at the time of submission. In this work, we describe GPT-NeoX-20B's architecture and training, and evaluate its performance. We open-source the training and evaluation code, as well as the model weights, at https://github.com/EleutherAI/gpt-neox.}
}
@article{miotto_who_2022,
title={Who is {GPT}-3? An Exploration of Personality, Values and Demographics},
shorttitle={Who is {GPT}-3?},
journal={arXiv preprint arXiv:2209.14338},
url={http://arxiv.org/abs/2209.14338},
doi={10.48550/arXiv.2209.14338},
abstract={Language models such as GPT-3 have caused a furore in the research community. Some studies found that GPT-3 has some creative abilities and makes mistakes that are on par with human behaviour. This paper answers a related question: who is GPT-3? We administered two validated measurement tools to GPT-3 to assess its personality, the values it holds and its self-reported demographics. Our results show that GPT-3 scores similarly to human samples in terms of personality and - when provided with a model response memory - in terms of the values it holds. We provide the first evidence of psychological assessment of the GPT-3 model and thereby add to our understanding of the GPT-3 model. We close with suggestions for future research that moves social science closer to language models and vice versa.},
urldate={2022-10-03},
author={Miotto, Marilù and Rossberg, Nicola and Kleinberg, Bennett},
month=sep,
year={2022},
note={arXiv:2209.14338 [cs]},
keywords={Computer Science - Computation and Language}
}
@inproceedings{shihadehbrilliance,
title={Brilliance Bias in GPT-3},
author={Shihadeh, Juliana and Ackerman, Margareta and Troske, Ashley and Lawson, Nicole and Gonzalez, Edith},
booktitle={2022 IEEE Global Humanitarian Technology Conference (GHTC)},
year={2022},
doi={10.1109/GHTC55712.2022.9910995}
}
@article{vandermaas2021,
title={How much intelligence is there in artificial intelligence? A 2020 update},
author={van der Maas, Han L. J. and Snoek, Lukas and Stevenson, Claire E.},
journal={Intelligence},
volume={87},
pages={101548},
doi={10.1016/j.intell.2021.101548},
url={https://linkinghub.elsevier.com/retrieve/pii/S0160289621000325},
month=jul,
year={2021}
}