This is a library for integrating OpenAI into your flow functions on flows.network.

Visit ChatGPT

```rust
use flowsnet_platform_sdk::logger;
use lambda_flows::{request_received, send_response};
use openai_flows::{
    chat::{ChatModel, ChatOptions},
    OpenAIFlows,
};
use serde_json::Value;
use std::collections::HashMap;

#[no_mangle]
#[tokio::main(flavor = "current_thread")]
pub async fn run() {
    logger::init();
    request_received(handler).await;
}

async fn handler(_qry: HashMap<String, Value>, body: Vec<u8>) {
    // Chat session configuration: model, whether to restart the stored
    // conversation, and an optional system prompt.
    let co = ChatOptions {
        model: ChatModel::GPT35Turbo,
        restart: false,
        system_prompt: None,
    };
    let of = OpenAIFlows::new();

    // Treat the request body as the user message and keep the reply text,
    // falling back to the error message on failure.
    let r = match of
        .chat_completion(
            "any_conversation_id",
            String::from_utf8_lossy(&body).into_owned().as_str(),
            &co,
        )
        .await
    {
        Ok(c) => c.choice,
        Err(e) => e,
    };

    // Return the reply as a plain-text HTTP response.
    send_response(
        200,
        vec![(
            String::from("content-type"),
            String::from("text/plain; charset=UTF-8"),
        )],
        r.as_bytes().to_vec(),
    );
}

```

This example lets you hold a conversation with ChatGPT through a Lambda request: the flow function forwards the request body to chat_completion and returns the reply as the response.
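As a follow-up note on the options: `restart` controls whether the next call continues the stored conversation or starts a fresh one, and `system_prompt` gives the model standing instructions. Below is a minimal sketch of a handler variant that sets both; the prompt text, conversation ID, and helper name are placeholders, and the `Option<&str>` form of `system_prompt` is assumed from the example above rather than confirmed.

```rust
use openai_flows::{
    chat::{ChatModel, ChatOptions},
    OpenAIFlows,
};

// Hypothetical helper: ask a one-off question with a system prompt,
// returning the reply text or the error message.
async fn ask(question: &str) -> String {
    let co = ChatOptions {
        model: ChatModel::GPT35Turbo,
        // Start a fresh conversation on every call instead of continuing
        // the one stored under the conversation ID.
        restart: true,
        // Standing instructions for the model; assumed to be Option<&str>.
        system_prompt: Some("You are a concise assistant."),
    };
    let of = OpenAIFlows::new();

    match of.chat_completion("my_conversation_id", question, &co).await {
        Ok(c) => c.choice,
        Err(e) => e,
    }
}
```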

The full documentation for this library is available here.