@@ -2,7 +2,6 @@ package openai
 
 import (
 	"context"
-	"log/slog"
 	"slices"
 	"strings"
 
@@ -12,10 +11,13 @@ import (
 	openai "github.com/openai/openai-go"
 	"github.com/openai/openai-go/option"
 	"github.com/openai/openai-go/packages/pagination"
+	"github.com/openai/openai-go/responses"
+	"github.com/openai/openai-go/shared"
+	"github.com/openai/openai-go/shared/constant"
 )
 
 func NewClient() (ret *Client) {
-	return NewClientCompatible("OpenAI", "https://api.openai.com/v1", nil)
+	return NewClientCompatibleWithResponses("OpenAI", "https://api.openai.com/v1", true, nil)
 }
 
 func NewClientCompatible(vendorName string, defaultBaseUrl string, configureCustom func() error) (ret *Client) {
@@ -28,6 +30,17 @@ func NewClientCompatible(vendorName string, defaultBaseUrl string, configureCust
 	return
 }
 
+func NewClientCompatibleWithResponses(vendorName string, defaultBaseUrl string, implementsResponses bool, configureCustom func() error) (ret *Client) {
+	ret = NewClientCompatibleNoSetupQuestions(vendorName, configureCustom)
+
+	ret.ApiKey = ret.AddSetupQuestion("API Key", true)
+	ret.ApiBaseURL = ret.AddSetupQuestion("API Base URL", false)
+	ret.ApiBaseURL.Value = defaultBaseUrl
+	ret.ImplementsResponses = implementsResponses
+
+	return
+}
+
 func NewClientCompatibleNoSetupQuestions(vendorName string, configureCustom func() error) (ret *Client) {
 	ret = &Client{}
 
@@ -46,9 +59,10 @@ func NewClientCompatibleNoSetupQuestions(vendorName string, configureCustom func
 
 type Client struct {
 	*plugins.PluginBase
-	ApiKey     *plugins.SetupQuestion
-	ApiBaseURL *plugins.SetupQuestion
-	ApiClient  *openai.Client
+	ApiKey              *plugins.SetupQuestion
+	ApiBaseURL          *plugins.SetupQuestion
+	ApiClient           *openai.Client
+	ImplementsResponses bool // Whether this provider supports the Responses API
 }
 
 func (o *Client) configure() (ret error) {
@@ -75,35 +89,59 @@ func (o *Client) ListModels() (ret []string, err error) {
 func (o *Client) SendStream(
 	msgs []*chat.ChatCompletionMessage, opts *common.ChatOptions, channel chan string,
 ) (err error) {
-	req := o.buildChatCompletionParams(msgs, opts)
-	stream := o.ApiClient.Chat.Completions.NewStreaming(context.Background(), req)
+	// Use Responses API for OpenAI, Chat Completions API for other providers
+	if o.supportsResponsesAPI() {
+		return o.sendStreamResponses(msgs, opts, channel)
+	}
+	return o.sendStreamChatCompletions(msgs, opts, channel)
+}
+
+func (o *Client) sendStreamResponses(
+	msgs []*chat.ChatCompletionMessage, opts *common.ChatOptions, channel chan string,
+) (err error) {
+	defer close(channel)
+
+	req := o.buildResponseParams(msgs, opts)
+	stream := o.ApiClient.Responses.NewStreaming(context.Background(), req)
 	for stream.Next() {
-		chunk := stream.Current()
-		if len(chunk.Choices) > 0 {
-			channel <- chunk.Choices[0].Delta.Content
+		event := stream.Current()
+		switch event.Type {
+		case string(constant.ResponseOutputTextDelta("").Default()):
+			channel <- event.AsResponseOutputTextDelta().Delta
+		case string(constant.ResponseOutputTextDone("").Default()):
+			channel <- event.AsResponseOutputTextDone().Text
 		}
 	}
 	if stream.Err() == nil {
		channel <- "\n"
 	}
-	close(channel)
 	return stream.Err()
 }
 
 func (o *Client) Send(ctx context.Context, msgs []*chat.ChatCompletionMessage, opts *common.ChatOptions) (ret string, err error) {
-	req := o.buildChatCompletionParams(msgs, opts)
+	// Use Responses API for OpenAI, Chat Completions API for other providers
+	if o.supportsResponsesAPI() {
+		return o.sendResponses(ctx, msgs, opts)
+	}
+	return o.sendChatCompletions(ctx, msgs, opts)
+}
+
+func (o *Client) sendResponses(ctx context.Context, msgs []*chat.ChatCompletionMessage, opts *common.ChatOptions) (ret string, err error) {
+	req := o.buildResponseParams(msgs, opts)
 
-	var resp *openai.ChatCompletion
-	if resp, err = o.ApiClient.Chat.Completions.New(ctx, req); err != nil {
+	var resp *responses.Response
+	if resp, err = o.ApiClient.Responses.New(ctx, req); err != nil {
 		return
 	}
-	if len(resp.Choices) > 0 {
-		ret = resp.Choices[0].Message.Content
-		slog.Debug("SystemFingerprint: " + resp.SystemFingerprint)
-	}
+	ret = o.extractText(resp)
 	return
 }
 
+// supportsResponsesAPI determines if the provider supports the new Responses API
+func (o *Client) supportsResponsesAPI() bool {
+	return o.ImplementsResponses
+}
+
 func (o *Client) NeedsRawMode(modelName string) bool {
 	openaiModelsPrefixes := []string{
 		"o1",
@@ -115,8 +153,6 @@ func (o *Client) NeedsRawMode(modelName string) bool {
 		"gpt-4o-mini-search-preview-2025-03-11",
 		"gpt-4o-search-preview",
 		"gpt-4o-search-preview-2025-03-11",
-		"o4-mini-deep-research",
-		"o4-mini-deep-research-2025-06-26",
 	}
 	for _, prefix := range openaiModelsPrefixes {
 		if strings.HasPrefix(modelName, prefix) {
@@ -126,56 +162,85 @@ func (o *Client) NeedsRawMode(modelName string) bool {
 	return slices.Contains(openAIModelsNeedingRaw, modelName)
 }
 
-func (o *Client) buildChatCompletionParams(
+func (o *Client) buildResponseParams(
 	inputMsgs []*chat.ChatCompletionMessage, opts *common.ChatOptions,
-) (ret openai.ChatCompletionNewParams) {
+) (ret responses.ResponseNewParams) {
 
-	// Create a new slice for messages to be sent, converting from []*Msg to []Msg.
-	// This also serves as a mutable copy for provider-specific modifications.
-	messagesForRequest := make([]openai.ChatCompletionMessageParamUnion, len(inputMsgs))
+	items := make([]responses.ResponseInputItemUnionParam, len(inputMsgs))
 	for i, msgPtr := range inputMsgs {
-		msg := *msgPtr // copy
-		// Provider-specific modification for DeepSeek:
+		msg := *msgPtr
 		if strings.Contains(opts.Model, "deepseek") && len(inputMsgs) == 1 && msg.Role == chat.ChatMessageRoleSystem {
 			msg.Role = chat.ChatMessageRoleUser
 		}
-		messagesForRequest[i] = convertMessage(msg)
+		items[i] = convertMessage(msg)
 	}
-	ret = openai.ChatCompletionNewParams{
-		Model:    openai.ChatModel(opts.Model),
-		Messages: messagesForRequest,
+
+	ret = responses.ResponseNewParams{
+		Model: shared.ResponsesModel(opts.Model),
+		Input: responses.ResponseNewParamsInputUnion{
+			OfInputItemList: items,
+		},
 	}
+
 	if !opts.Raw {
 		ret.Temperature = openai.Float(opts.Temperature)
 		ret.TopP = openai.Float(opts.TopP)
-		ret.PresencePenalty = openai.Float(opts.PresencePenalty)
-		ret.FrequencyPenalty = openai.Float(opts.FrequencyPenalty)
+		if opts.MaxTokens != 0 {
+			ret.MaxOutputTokens = openai.Int(int64(opts.MaxTokens))
+		}
+
+		// Add parameters not officially supported by Responses API as extra fields
+		extraFields := make(map[string]any)
+		if opts.PresencePenalty != 0 {
+			extraFields["presence_penalty"] = opts.PresencePenalty
+		}
+		if opts.FrequencyPenalty != 0 {
+			extraFields["frequency_penalty"] = opts.FrequencyPenalty
+		}
 		if opts.Seed != 0 {
-			ret.Seed = openai.Int(int64(opts.Seed))
+			extraFields["seed"] = opts.Seed
+		}
+		if len(extraFields) > 0 {
+			ret.SetExtraFields(extraFields)
 		}
 	}
 	return
 }
 
-func convertMessage(msg chat.ChatCompletionMessage) openai.ChatCompletionMessageParamUnion {
-	switch msg.Role {
-	case chat.ChatMessageRoleSystem:
-		return openai.SystemMessage(msg.Content)
-	case chat.ChatMessageRoleUser:
-		if len(msg.MultiContent) > 0 {
-			var parts []openai.ChatCompletionContentPartUnionParam
-			for _, p := range msg.MultiContent {
-				switch p.Type {
-				case chat.ChatMessagePartTypeText:
-					parts = append(parts, openai.TextContentPart(p.Text))
-				case chat.ChatMessagePartTypeImageURL:
-					parts = append(parts, openai.ImageContentPart(openai.ChatCompletionContentPartImageImageURLParam{URL: p.ImageURL.URL}))
+func convertMessage(msg chat.ChatCompletionMessage) responses.ResponseInputItemUnionParam {
+	result := convertMessageCommon(msg)
+	role := responses.EasyInputMessageRole(result.Role)
+
+	if result.HasMultiContent {
+		var parts []responses.ResponseInputContentUnionParam
+		for _, p := range result.MultiContent {
+			switch p.Type {
+			case chat.ChatMessagePartTypeText:
+				parts = append(parts, responses.ResponseInputContentParamOfInputText(p.Text))
+			case chat.ChatMessagePartTypeImageURL:
+				part := responses.ResponseInputContentParamOfInputImage(responses.ResponseInputImageDetailAuto)
+				if part.OfInputImage != nil {
+					part.OfInputImage.ImageURL = openai.String(p.ImageURL.URL)
+				}
+				parts = append(parts, part)
+			}
+		}
+		contentList := responses.ResponseInputMessageContentListParam(parts)
+		return responses.ResponseInputItemParamOfMessage(contentList, role)
+	}
+	return responses.ResponseInputItemParamOfMessage(result.Content, role)
+}
+
+func (o *Client) extractText(resp *responses.Response) (ret string) {
+	for _, item := range resp.Output {
+		if item.Type == "message" {
+			for _, c := range item.Content {
+				if c.Type == "output_text" {
+					ret += c.AsOutputText().Text
 				}
 			}
-			return openai.UserMessage(parts)
+			break
 		}
-		return openai.UserMessage(msg.Content)
-	default:
-		return openai.AssistantMessage(msg.Content)
 	}
+	return
 }
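
For reference, a provider plugin that also speaks the Responses API would opt in through the new constructor, mirroring NewClient above. A minimal sketch, assuming a hypothetical vendor; the function name, vendor name, and base URL below are placeholders and not part of this change:

func NewExampleResponsesClient() (ret *Client) {
	// Hypothetical compatible vendor; passing true sets ImplementsResponses,
	// so Send and SendStream route through sendResponses/sendStreamResponses.
	return NewClientCompatibleWithResponses("ExampleVendor", "https://api.example.com/v1", true, nil)
}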