1
- use std:: io:: { stdout, Write } ;
2
1
use std:: collections:: HashMap ;
3
2
use std:: error:: Error ;
3
+ use std:: io:: { stdout, Write } ;
4
4
5
5
use async_openai:: {
6
6
types:: {
@@ -13,7 +13,6 @@ use async_openai::{
13
13
use futures:: StreamExt ;
14
14
use serde_json:: json;
15
15
16
-
17
16
#[ tokio:: main]
18
17
async fn main ( ) -> Result < ( ) , Box < dyn Error > > {
19
18
let client = Client :: new ( ) ;
@@ -44,80 +43,80 @@ async fn main() -> Result<(), Box<dyn Error>> {
44
43
. build ( ) ?;
45
44
46
45
// the first response from GPT is just the json response containing the function that was called
47
- // and the model-generated arguments for that function (don't stream this)
46
+ // and the model-generated arguments for that function (don't stream this)
48
47
let response = client
49
- . chat ( )
50
- . create ( request)
51
- . await ?
52
- . choices
53
- . get ( 0 )
54
- . unwrap ( )
55
- . message
56
- . clone ( ) ;
57
-
58
- if let Some ( function_call) = response. function_call {
59
- let mut available_functions: HashMap < & str , fn ( & str , & str ) -> serde_json:: Value > =
60
- HashMap :: new ( ) ;
61
- available_functions. insert ( "get_current_weather" , get_current_weather) ;
62
-
63
- let function_name = function_call. name ;
64
- let function_args: serde_json:: Value = function_call. arguments . parse ( ) . unwrap ( ) ;
48
+ . chat ( )
49
+ . create ( request)
50
+ . await ?
51
+ . choices
52
+ . get ( 0 )
53
+ . unwrap ( )
54
+ . message
55
+ . clone ( ) ;
65
56
66
- let location = function_args[ "location" ] . as_str ( ) . unwrap ( ) ;
67
- let unit = "fahrenheit" ; // why doesn't the model return a unit argument?
68
- let function = available_functions. get ( function_name. as_str ( ) ) . unwrap ( ) ;
57
+ if let Some ( function_call) = response. function_call {
58
+ let mut available_functions: HashMap < & str , fn ( & str , & str ) -> serde_json:: Value > =
59
+ HashMap :: new ( ) ;
60
+ available_functions. insert ( "get_current_weather" , get_current_weather) ;
61
+
62
+ let function_name = function_call. name ;
63
+ let function_args: serde_json:: Value = function_call. arguments . parse ( ) . unwrap ( ) ;
64
+
65
+ let location = function_args[ "location" ] . as_str ( ) . unwrap ( ) ;
66
+ let unit = "fahrenheit" ; // why doesn't the model return a unit argument?
67
+ let function = available_functions. get ( function_name. as_str ( ) ) . unwrap ( ) ;
69
68
let function_response = function ( location, unit) ; // call the function
70
69
71
- let message = vec ! [
72
- ChatCompletionRequestMessageArgs :: default ( )
73
- . role( Role :: User )
74
- . content( "What's the weather like in Boston?" )
75
- . build( ) ?,
76
- ChatCompletionRequestMessageArgs :: default ( )
77
- . role( Role :: Function )
78
- . content( function_response. to_string( ) )
79
- . name( function_name)
80
- . build( ) ?
81
- ] ;
70
+ let message = vec ! [
71
+ ChatCompletionRequestMessageArgs :: default ( )
72
+ . role( Role :: User )
73
+ . content( "What's the weather like in Boston?" )
74
+ . build( ) ?,
75
+ ChatCompletionRequestMessageArgs :: default ( )
76
+ . role( Role :: Function )
77
+ . content( function_response. to_string( ) )
78
+ . name( function_name)
79
+ . build( ) ?,
80
+ ] ;
81
+
82
+ let request = CreateChatCompletionRequestArgs :: default ( )
83
+ . max_tokens ( 512u16 )
84
+ . model ( "gpt-3.5-turbo-0613" )
85
+ . messages ( message)
86
+ . build ( ) ?;
82
87
83
- let request = CreateChatCompletionRequestArgs :: default ( )
84
- . max_tokens ( 512u16 )
85
- . model ( "gpt-3.5-turbo-0613" )
86
- . messages ( message)
87
- . build ( ) ?;
88
-
89
- // Now stream received response from model, which essentially formats the function response
90
- let mut stream = client. chat ( ) . create_stream ( request) . await ?;
88
+ // Now stream received response from model, which essentially formats the function response
89
+ let mut stream = client. chat ( ) . create_stream ( request) . await ?;
91
90
92
- let mut lock = stdout ( ) . lock ( ) ;
93
- while let Some ( result) = stream. next ( ) . await {
94
- match result {
95
- Ok ( response) => {
96
- response. choices . iter ( ) . for_each ( |chat_choice| {
97
- if let Some ( ref content) = chat_choice. delta . content {
98
- write ! ( lock, "{}" , content) . unwrap ( ) ;
99
- }
100
- } ) ;
101
- }
102
- Err ( err) => {
103
- writeln ! ( lock, "error: {err}" ) . unwrap ( ) ;
104
- }
105
- }
106
- stdout ( ) . flush ( ) ?;
107
- }
108
- println ! ( "{}" , "\n " ) ;
109
- }
91
+ let mut lock = stdout ( ) . lock ( ) ;
92
+ while let Some ( result) = stream. next ( ) . await {
93
+ match result {
94
+ Ok ( response) => {
95
+ response. choices . iter ( ) . for_each ( |chat_choice| {
96
+ if let Some ( ref content) = chat_choice. delta . content {
97
+ write ! ( lock, "{}" , content) . unwrap ( ) ;
98
+ }
99
+ } ) ;
100
+ }
101
+ Err ( err) => {
102
+ writeln ! ( lock, "error: {err}" ) . unwrap ( ) ;
103
+ }
104
+ }
105
+ stdout ( ) . flush ( ) ?;
106
+ }
107
+ println ! ( "{}" , "\n " ) ;
108
+ }
110
109
111
110
Ok ( ( ) )
112
111
}
113
112
114
113
/// Stub implementation of the `get_current_weather` function exposed to the
/// model for function calling.
///
/// # Arguments
/// * `location` - City/region string as extracted by the model from the user
///   prompt (e.g. "Boston, MA").
/// * `unit` - Temperature unit label; the caller currently hard-codes
///   "fahrenheit".
///
/// # Returns
/// A `serde_json::Value` object with fixed demo data (`temperature` is always
/// "72"); only `location` and `unit` are echoed from the inputs. The caller
/// serializes this value into a `Role::Function` message for the follow-up
/// chat request.
fn get_current_weather(location: &str, unit: &str) -> serde_json::Value {
    // Return the JSON expression directly instead of binding it to a
    // temporary and returning the binding (Clippy `let_and_return`).
    json!({
        "location": location,
        "temperature": "72",
        "unit": unit,
        "forecast": ["sunny", "windy"]
    })
}
0 commit comments