Basic package setup
diff --git a/R/authenticate.R b/R/authenticate.R
new file mode 100644
index 0000000..e896cec
--- /dev/null
+++ b/R/authenticate.R
@@ -0,0 +1,33 @@
+#' Set up the authentication with your API key
+#'
+#' @description
+#' Access to GPT-3's functions requires an API key that you obtain from [https://openai.com/api/](https://openai.com/api/). `gpt3.authenticate()` accepts your API key and ensures that you can connect to the models. `gpt3.endsession()` overwrites your API key _for this session_ (it is recommended that you run this when you are done). `check_apikey_form()` is a simple check of whether an API key has been provided at all.
+#' @param apikey A character vector that is your personal API key
+#' @return A confirmation message
+#' @examples
+#' # Starting a session:
+#' gpt3.authenticate(apikey = 'REPLACE_THIS_WITH_YOUR_KEY')
+# '
+#' # After you are finished:
+#' gpt3.endsession()
+#' @export
+gpt3.authenticate = function(apikey){
+  api_key <<- apikey
+  print(paste0("Will use --> ", api_key, " for authentication."))
+}
+
+gpt3.endsession = function(){
+  api_key <<- "---"
+  print('-- session ended: you need to re-authenticate next time.')
+}
+
+check_apikey_form = function(){
+
+  if(exists(x = 'api_key') == F){
+    warning("Use gpt3.authenticate() to set your API key")
+  } else if(nchar(api_key) < 10){
+    warning("Your API key looks too short; use gpt3.authenticate() to set a valid API key")
+  }
+}
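+
+# Minimal session sketch (not run); the key below is a placeholder:
+# gpt3.authenticate(apikey = 'REPLACE_THIS_WITH_YOUR_KEY')
+# check_apikey_form()  # warns if no key (or an implausibly short one) is set
+# gpt3.endsession()    # clear the key when you are done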
diff --git a/R/base_urls.R b/R/base_urls.R
new file mode 100644
index 0000000..41cb50f
--- /dev/null
+++ b/R/base_urls.R
@@ -0,0 +1,8 @@
+#' Contains the package's base URLs
+#'
+#' @description
+#' These are the base URLs for the `rgpt3` package. Do not change these!
+#' @export
+url.completions = "https://api.openai.com/v1/completions"
+url.embeddings = "https://api.openai.com/v1/embeddings"
+url.fine_tune = "https://api.openai.com/v1/fine-tunes"
diff --git a/R/bunch_embedding.R b/R/bunch_embedding.R
new file mode 100644
index 0000000..29e3dc6
--- /dev/null
+++ b/R/bunch_embedding.R
@@ -0,0 +1,32 @@
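+#' Obtain embeddings for multiple texts from a data.table
+#'
+#' @description
+#' `gpt3.bunch_embedding()` loops over the rows of a data.table, requests an embedding for each text via `gpt3.make_embedding()`, and binds the results into a single data.table with one `dim_*` column per embedding dimension and the row identifier in `id_full`.
+#' @param data A data.table that contains the texts and ids
+#' @param text_var (character) name of the column that holds the texts to embed
+#' @param id_var (character) name of the column that holds the row identifiers
+#' @param param_model (character) the embedding model to use (default: 'text-similarity-ada-001')
+#' @return A data.table of embeddings with one row per input text
+#' @export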
+gpt3.bunch_embedding = function(data
+                                , text_var
+                                , id_var
+                                , param_model = 'text-similarity-ada-001'){
+
+  data_ = data
+
+  data_length = data_[, .N]
+
+  empty_list = list()
+
+  for(i in 1:data_length){
+
+    print(paste0('Embedding: ', i, '/', data_length))
+
+    row_outcome = gpt3.make_embedding(model_ = param_model
+                                      , input_ = as.character(unname(data_[i, ..text_var])))
+
+    empty_df = data.frame(t(row_outcome))
+    names(empty_df) = paste0('dim_', 1:length(row_outcome))
+    empty_df$id_full = as.character(unname(data_[i, ..id_var]))
+
+    empty_list[[i]] = empty_df
+
+
+  }
+
+  output_data = rbindlist(empty_list)
+
+  return(output_data)
+
+}
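+
+# Usage sketch (not run); the data below are placeholders:
+# dt = data.table::data.table(id = 1:2
+#                             , text = c('This is a first text', 'This is a second text'))
+# embeddings = gpt3.bunch_embedding(dt, text_var = 'text', id_var = 'id')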
diff --git a/R/bunch_request.R b/R/bunch_request.R
new file mode 100644
index 0000000..44fd78f
--- /dev/null
+++ b/R/bunch_request.R
@@ -0,0 +1,58 @@
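+#' Obtain completions for multiple prompts from a data.table
+#'
+#' @description
+#' `gpt3.bunch_request()` loops over the rows of a data.table, requests a completion for each prompt via `gpt3.make_request()`, and appends the completion text as a new column named via `completion_var_name`.
+#' @param data A data.table that contains the prompts
+#' @param prompt_var (character) name of the column that holds the prompts
+#' @param completion_var_name (character) name of the new column that will hold the completions (default: 'gpt3_completion')
+#' @param param_model (character) the completion model to use (default: 'text-davinci-002')
+#' @param param_max_tokens (numeric) the maximum number of tokens per completion (default: 256)
+#' @param param_temperature (numeric) the sampling temperature (default: 0.9)
+#' @param param_suffix,param_top_p,param_n,param_stream,param_logprobs,param_echo,param_stop,param_presence_penalty,param_frequency_penalty,param_best_of,param_logit_bias further request parameters passed on to `gpt3.make_request()`
+#' @return The input data.table with the completion column appended
+#' @export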
+gpt3.bunch_request = function(data
+                              , prompt_var
+                              , completion_var_name = 'gpt3_completion'
+                              , param_model = 'text-davinci-002'
+                              , param_suffix = NULL
+                              , param_max_tokens = 256
+                              , param_temperature = 0.9
+                              , param_top_p = 1
+                              , param_n = 1
+                              , param_stream = F
+                              , param_logprobs = NULL
+                              , param_echo = F
+                              , param_stop = NULL
+                              , param_presence_penalty = 0
+                              , param_frequency_penalty = 0
+                              , param_best_of = 1
+                              , param_logit_bias = NULL){
+
+
+  data_ = data
+
+  data_length = data_[, .N]
+
+  data_[, completion_name := '']
+
+
+  for(i in 1:data_length){
+
+    print(paste0('Request: ', i, '/', data_length))
+
+    row_outcome = gpt3.make_request(prompt_ = as.character(unname(data_[i, ..prompt_var]))
+                                    , model_ = param_model
+                                    , output_type_ = 'detail'
+                                    , suffix_ = param_suffix
+                                    , max_tokens_ = param_max_tokens
+                                    , temperature_ = param_temperature
+                                    , top_p_ = param_top_p
+                                    , n_ = param_n
+                                    , stream_ = param_stream
+                                    , logprobs_ = param_logprobs
+                                    , echo_ = param_echo
+                                    , stop_ = param_stop
+                                    , presence_penalty_ = param_presence_penalty
+                                    , frequency_penalty_ = param_frequency_penalty
+                                    , best_of_ = param_best_of
+                                    , logit_bias_ = param_logit_bias)
+
+
+    data_$completion_name[i] = row_outcome$choices[[1]]$text
+
+
+  }
+
+  data_cols = ncol(data_)
+  names(data_)[data_cols] = completion_var_name
+
+  return(data_)
+}
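+
+# Usage sketch (not run); the prompts below are placeholders:
+# dt = data.table::data.table(prompt = c('Write a haiku about data:'
+#                                        , 'Write a limerick about R:'))
+# dt_completed = gpt3.bunch_request(dt, prompt_var = 'prompt', param_max_tokens = 50)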
diff --git a/R/make_embedding.R b/R/make_embedding.R
new file mode 100644
index 0000000..afdb610
--- /dev/null
+++ b/R/make_embedding.R
@@ -0,0 +1,19 @@
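+#' Request an embedding for a single text
+#'
+#' @description
+#' `gpt3.make_embedding()` sends a single text to the Open AI embeddings API and returns the embedding as a numeric vector.
+#' @param model_ (character) the embedding model to use (default: 'text-similarity-ada-001')
+#' @param input_ (character) the text to embed
+#' @return A numeric vector containing the embedding
+#' @export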
+gpt3.make_embedding = function(model_ = 'text-similarity-ada-001'
+                               , input_){
+
+  parameter_list = list(model = model_
+                        , input = input_)
+
+  request_base = httr::POST(url = url.embeddings
+                            , body = parameter_list
+                            , httr::add_headers(Authorization = paste("Bearer", api_key))
+                            , encode = "json")
+
+
+  output_base = httr::content(request_base)
+
+  embedding_raw = as.numeric(unlist(output_base$data[[1]]$embedding))
+
+  return(embedding_raw)
+
+}
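+
+# Usage sketch (not run): embed two texts and compare them via cosine similarity.
+# e1 = gpt3.make_embedding(input_ = 'R is a language for statistical computing')
+# e2 = gpt3.make_embedding(input_ = 'Python is a general-purpose programming language')
+# sum(e1 * e2) / (sqrt(sum(e1^2)) * sqrt(sum(e2^2)))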
diff --git a/R/make_request.R b/R/make_request.R
new file mode 100644
index 0000000..f6c135c
--- /dev/null
+++ b/R/make_request.R
@@ -0,0 +1,60 @@
+#' Make a completion request to the GPT-3 API
+#'
+#' @description
+#' `gpt3.make_request()` sends a single [completion request](https://beta.openai.com/docs/api-reference/completions) to the Open AI GPT-3 API and returns either the completion text only or the full parsed response.
+#' @param prompt_ (character) the prompt to complete
+#' @param model_ (character) the model to use (default: 'text-davinci-002')
+#' @param output_type_ (character) 'string_only' returns only the completion text; any other value returns the full parsed response (default: 'string_only')
+#' @param max_tokens_ (numeric) the maximum number of tokens to generate (default: 256)
+#' @param temperature_ (numeric) the sampling temperature (default: 0.9)
+#' @param suffix_,top_p_,n_,stream_,logprobs_,echo_,stop_,presence_penalty_,frequency_penalty_,best_of_,logit_bias_ further request parameters passed to the API request body under the same names (without the trailing underscore)
+#' @return Either the completion text (for 'string_only') or the full parsed API response.
+#' @examples
+#' gpt3.make_request(prompt_ = 'Write a haiku about data:')
+#' @export
+gpt3.make_request = function(prompt_
+                             , model_ = 'text-davinci-002'
+                             , output_type_ = 'string_only'
+                             , suffix_ = NULL
+                             , max_tokens_ = 256
+                             , temperature_ = 0.9
+                             , top_p_ = 1
+                             , n_ = 1
+                             , stream_ = F
+                             , logprobs_ = NULL
+                             , echo_ = F
+                             , stop_ = NULL
+                             , presence_penalty_ = 0
+                             , frequency_penalty_ = 0
+                             , best_of_ = 1
+                             , logit_bias_ = NULL
+)
+{
+
+  parameter_list = list(prompt = prompt_
+                        , model = model_
+                        , suffix = suffix_
+                        , max_tokens = max_tokens_
+                        , temperature = temperature_
+                        , top_p = top_p_
+                        , n = n_
+                        , stream = stream_
+                        , logprobs = logprobs_
+                        , echo = echo_
+                        , stop = stop_
+                        , presence_penalty = presence_penalty_
+                        , frequency_penalty = frequency_penalty_
+                        , best_of = best_of_
+                        , logit_bias = logit_bias_
+  )
+
+  request_base = httr::POST(url = url.completions
+                      , body = parameter_list
+                      , httr::add_headers(Authorization = paste("Bearer", api_key))
+                      , encode = "json")
+
+
+  if(output_type_ == 'string_only'){
+    output = httr::content(request_base)$choices[[1]]$text
+  } else {
+    output = httr::content(request_base)
+  }
+
+  return(output)
+
+}
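+
+# Usage sketch (not run): 'string_only' returns just the completion text,
+# any other output_type_ returns the full parsed API response.
+# gpt3.make_request(prompt_ = 'Write a haiku about data:', max_tokens_ = 50)
+# gpt3.make_request(prompt_ = 'Write a haiku about data:', output_type_ = 'detail')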
diff --git a/R/test_request.R b/R/test_request.R
new file mode 100644
index 0000000..15269a7
--- /dev/null
+++ b/R/test_request.R
@@ -0,0 +1,24 @@
+#' Make a test request to the GPT-3 API
+#'
+#' @description
+#' `gpt3.test_request()` sends a basic [completion request](https://beta.openai.com/docs/api-reference/completions) to the Open AI GPT-3 API.
+#' @param verbose (boolean) if TRUE prints the actual prompt and GPT-3 completion of the test request (default: FALSE).
+#' @return A message of success or failure of the connection.
+#' @examples
+#' gpt3.test_request()
+#' @export
+gpt3.test_request = function(verbose=F){
+
+  check_apikey_form()
+
+  test_prompt = 'Write a story about R Studio:'
+  test_output = gpt3.make_request(prompt_ = test_prompt
+                                  , max_tokens_ = 100)
+  print(paste0('.. test successful ..'))
+
+  if(verbose==T){
+    print(paste0('Requested completion for this prompt --> ', test_prompt))
+    print(paste0('GPT-3 completed --> ', test_output))
+  }
+
+}