Skip to content

Commit d9a201a

Browse files
authored
refactors (64bit#91)
* cargo fmt
* remove colon
* add function call stream in makefile
1 parent e120962 commit d9a201a

File tree

3 files changed

+67
-67
lines changed

3 files changed

+67
-67
lines changed

Diff for: async-openai/README.md

+1-1
Original file line numberDiff line numberDiff line change
@@ -105,7 +105,7 @@ Thank you for your time to contribute and improve the project, I'd be happy to h
105105

106106
A good starting point would be existing [open issues](https://github.com/64bit/async-openai/issues).
107107

108-
## Complimentary Crates:
108+
## Complimentary Crates
109109
- [openai-func-enums](https://github.com/frankfralick/openai-func-enums) provides procedural macros that make it easier to use this library with OpenAI API's function calling feature. It also provides derive macros you can add to existing [clap](https://github.com/clap-rs/clap) application subcommands for natural language use of command line tools.
110110

111111
## License

Diff for: examples/Makefile.toml

+1
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@ cd create-image-edit && cargo run && cd -
1515
cd create-image-variation && cargo run && cd -
1616
cd embeddings && cargo run && cd -
1717
cd function-call && cargo run && cd -
18+
cd function-call-stream && cargo run && cd -
1819
cd models && cargo run && cd -
1920
cd moderations && cargo run && cd -
2021
#cd rate-limit-completions && cargo run && cd -

Diff for: examples/function-call-stream/src/main.rs

+65-66
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
1-
use std::io::{stdout, Write};
21
use std::collections::HashMap;
32
use std::error::Error;
3+
use std::io::{stdout, Write};
44

55
use async_openai::{
66
types::{
@@ -13,7 +13,6 @@ use async_openai::{
1313
use futures::StreamExt;
1414
use serde_json::json;
1515

16-
1716
#[tokio::main]
1817
async fn main() -> Result<(), Box<dyn Error>> {
1918
let client = Client::new();
@@ -44,80 +43,80 @@ async fn main() -> Result<(), Box<dyn Error>> {
4443
.build()?;
4544

4645
// the first response from GPT is just the json response containing the function that was called
47-
// and the model-generated arguments for that function (don't stream this)
46+
// and the model-generated arguments for that function (don't stream this)
4847
let response = client
49-
.chat()
50-
.create(request)
51-
.await?
52-
.choices
53-
.get(0)
54-
.unwrap()
55-
.message
56-
.clone();
57-
58-
if let Some(function_call) = response.function_call {
59-
let mut available_functions: HashMap<&str, fn(&str, &str) -> serde_json::Value> =
60-
HashMap::new();
61-
available_functions.insert("get_current_weather", get_current_weather);
62-
63-
let function_name = function_call.name;
64-
let function_args: serde_json::Value = function_call.arguments.parse().unwrap();
48+
.chat()
49+
.create(request)
50+
.await?
51+
.choices
52+
.get(0)
53+
.unwrap()
54+
.message
55+
.clone();
6556

66-
let location = function_args["location"].as_str().unwrap();
67-
let unit = "fahrenheit"; // why doesn't the model return a unit argument?
68-
let function = available_functions.get(function_name.as_str()).unwrap();
57+
if let Some(function_call) = response.function_call {
58+
let mut available_functions: HashMap<&str, fn(&str, &str) -> serde_json::Value> =
59+
HashMap::new();
60+
available_functions.insert("get_current_weather", get_current_weather);
61+
62+
let function_name = function_call.name;
63+
let function_args: serde_json::Value = function_call.arguments.parse().unwrap();
64+
65+
let location = function_args["location"].as_str().unwrap();
66+
let unit = "fahrenheit"; // why doesn't the model return a unit argument?
67+
let function = available_functions.get(function_name.as_str()).unwrap();
6968
let function_response = function(location, unit); // call the function
7069

71-
let message = vec![
72-
ChatCompletionRequestMessageArgs::default()
73-
.role(Role::User)
74-
.content("What's the weather like in Boston?")
75-
.build()?,
76-
ChatCompletionRequestMessageArgs::default()
77-
.role(Role::Function)
78-
.content(function_response.to_string())
79-
.name(function_name)
80-
.build()?
81-
];
70+
let message = vec![
71+
ChatCompletionRequestMessageArgs::default()
72+
.role(Role::User)
73+
.content("What's the weather like in Boston?")
74+
.build()?,
75+
ChatCompletionRequestMessageArgs::default()
76+
.role(Role::Function)
77+
.content(function_response.to_string())
78+
.name(function_name)
79+
.build()?,
80+
];
81+
82+
let request = CreateChatCompletionRequestArgs::default()
83+
.max_tokens(512u16)
84+
.model("gpt-3.5-turbo-0613")
85+
.messages(message)
86+
.build()?;
8287

83-
let request = CreateChatCompletionRequestArgs::default()
84-
.max_tokens(512u16)
85-
.model("gpt-3.5-turbo-0613")
86-
.messages(message)
87-
.build()?;
88-
89-
// Now stream received response from model, which essentially formats the function response
90-
let mut stream = client.chat().create_stream(request).await?;
88+
// Now stream received response from model, which essentially formats the function response
89+
let mut stream = client.chat().create_stream(request).await?;
9190

92-
let mut lock = stdout().lock();
93-
while let Some(result) = stream.next().await {
94-
match result {
95-
Ok(response) => {
96-
response.choices.iter().for_each(|chat_choice| {
97-
if let Some(ref content) = chat_choice.delta.content {
98-
write!(lock, "{}", content).unwrap();
99-
}
100-
});
101-
}
102-
Err(err) => {
103-
writeln!(lock, "error: {err}").unwrap();
104-
}
105-
}
106-
stdout().flush()?;
107-
}
108-
println!("{}", "\n");
109-
}
91+
let mut lock = stdout().lock();
92+
while let Some(result) = stream.next().await {
93+
match result {
94+
Ok(response) => {
95+
response.choices.iter().for_each(|chat_choice| {
96+
if let Some(ref content) = chat_choice.delta.content {
97+
write!(lock, "{}", content).unwrap();
98+
}
99+
});
100+
}
101+
Err(err) => {
102+
writeln!(lock, "error: {err}").unwrap();
103+
}
104+
}
105+
stdout().flush()?;
106+
}
107+
println!("{}", "\n");
108+
}
110109

111110
Ok(())
112111
}
113112

114113
fn get_current_weather(location: &str, unit: &str) -> serde_json::Value {
115-
let weather_info = json!({
116-
"location": location,
117-
"temperature": "72",
118-
"unit": unit,
119-
"forecast": ["sunny", "windy"]
120-
});
114+
let weather_info = json!({
115+
"location": location,
116+
"temperature": "72",
117+
"unit": unit,
118+
"forecast": ["sunny", "windy"]
119+
});
121120

122-
weather_info
121+
weather_info
123122
}

0 commit comments

Comments (0)