This crate provides a wrapper around the OpenAI API to use GPT-3.5 and GPT-4 for chatbots. It also provides a way to define functions that can be called from the chatbot.
This is a work in progress. The API is not stable and will change.
Add the following to your `Cargo.toml`:
```toml
[dependencies]
chatgpt-functions = "0.3"
```
The documentation is available at https://docs.rs/chatgpt-functions
You can find examples in the `examples` folder.
```rust use anyhow::{Context, Result}; use dotenv::dotenv;
use chatgpt_functions::chatgpt::ChatGPTBuilder;
#[tokio::main]
async fn main() -> Result<()> { dotenv().ok(); let key = std::env::var("OPENAI_API_KEY")?;
let mut gpt = ChatGPTBuilder::new().openai_api_token(key).build()?;
println!("Initialised chatbot. Enter your message to start a conversation.");
println!("Using:");
println!("- Model: {}", gpt.chat_context.model);
println!("- Session ID: {}", gpt.session_id);
println!("You can quit by pressing Ctrl+C (linux), or Cmd+C (Mac).");
println!("--------------------------------------");
loop {
println!("- Enter your message and press Enter:");
let mut input = String::new();
std::io::stdin()
.read_line(&mut input)
.context("Failed to read your input")?;
input.pop(); // Remove the trailing newline
println!("- AI:");
// println!("Request: {}", chat_context);
let answer = gpt.completion_managed(input).await?;
// println!("Full answer: {}", answer.to_string());
println!("{}", answer.choices[0].message);
println!("--------------------------------------");
}
} ```
```rust use anyhow::{Context, Result}; use chatgpt_functions::{ chatgpt::ChatGPTBuilder, function_specification::{FunctionSpecification}, }; use dotenv::dotenv;
#[tokio::main]
async fn main() -> Result<()> { dotenv().ok(); let key = std::env::var("OPENAI_API_KEY")?;
let mut gpt = ChatGPTBuilder::new().openai_api_token(key).build()?;
let json = r#"
{
"name": "get_current_weather",
"description": "Get the current weather in a given location",
"parameters": {
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "The city and state, e.g. San Francisco, CA"
},
"unit": {
"type": "string",
"enum": ["celsius", "fahrenheit"]
}
},
"required": ["location"]
}
}
"#;
let function: FunctionSpecification =
serde_json::from_str(json).expect("Could not parse correctly the function specification");
gpt.push_function(function);
println!("Initialised chatbot. Enter your message to start a conversation.");
println!("Using:");
println!("- Model: {}", gpt.chat_context.model);
println!("- Session ID: {}", gpt.session_id);
println!("You can quit by pressing Ctrl+C (linux), or Cmd+C (Mac).");
println!("--------------------------------------");
loop {
println!("- Enter your message and press Enter:");
let mut input = String::new();
std::io::stdin()
.read_line(&mut input)
.context("Failed to read your input")?;
input.pop(); // Remove the trailing newline
println!("- AI:");
// println!("Request: {}", chat_context);
let answer = gpt.completion_managed(input).await?;
println!("Full answer: {}", answer.to_string());
println!("--------------------------------------");
}
} ```
```bash curl https://api.openai.com/v1/chat/completions -H "Content-Type: application/json" -H "Authorization: Bearer $OPENAI_API_KEY" -d '{ "model": "gpt-3.5-turbo-0613", "messages": [{"role": "system", "content": "You are a helpful assistant."}, {"role": "user", "content": "What is the weather like in Madrid, Spain?"}], "functions": [{ "name": "get_current_weather", "description": "Get the current weather in a given location", "parameters": { "type": "object", "properties": { "location": { "type": "string", "description": "The city and state, e.g. San Francisco, CA" }, "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]} }, "required": ["location"] } }], "function_call": "auto" }'
{ "id": "chatcmpl-7Ut7jsNlTUO9k9L5kBF0uDAyG19pK", "object": "chat.completion", "created": 1687596091, "model": "gpt-3.5-turbo-0613", "choices": [ { "index": 0, "message": { "role": "assistant", "content": null, "function_call": { "name": "get_current_weather", "arguments": "{\n \"location\": \"Madrid, Spain\"\n}" } }, "finish_reason": "function_call" } ], "usage": { "prompt_tokens": 90, "completion_tokens": 19, "total_tokens": 109 } } ```
Contributions are welcome! Please open an issue or a pull request.