@@ -38,7 +38,16 @@ func TestUserAgentHeader(t *testing.T) {
 			},
 		}),
 	)
-	client.Toolgroups.List(context.Background())
+	client.Chat.Completions.New(context.Background(), llamastackclient.ChatCompletionNewParams{
+		Messages: []llamastackclient.ChatCompletionNewParamsMessageUnion{{
+			OfUser: &llamastackclient.ChatCompletionNewParamsMessageUser{
+				Content: llamastackclient.ChatCompletionNewParamsMessageUserContentUnion{
+					OfString: llamastackclient.String("string"),
+				},
+			},
+		}},
+		Model: "model",
+	})
 	if userAgent != fmt.Sprintf("LlamaStackClient/Go %s", internal.PackageVersion) {
 		t.Errorf("Expected User-Agent to be correct, but got: %#v", userAgent)
 	}
@@ -61,7 +70,16 @@ func TestRetryAfter(t *testing.T) {
 			},
 		}),
 	)
-	_, err := client.Toolgroups.List(context.Background())
+	_, err := client.Chat.Completions.New(context.Background(), llamastackclient.ChatCompletionNewParams{
+		Messages: []llamastackclient.ChatCompletionNewParamsMessageUnion{{
+			OfUser: &llamastackclient.ChatCompletionNewParamsMessageUser{
+				Content: llamastackclient.ChatCompletionNewParamsMessageUserContentUnion{
+					OfString: llamastackclient.String("string"),
+				},
+			},
+		}},
+		Model: "model",
+	})
 	if err == nil {
 		t.Error("Expected there to be a cancel error")
 	}
@@ -95,7 +113,16 @@ func TestDeleteRetryCountHeader(t *testing.T) {
 		}),
 		option.WithHeaderDel("X-Stainless-Retry-Count"),
 	)
-	_, err := client.Toolgroups.List(context.Background())
+	_, err := client.Chat.Completions.New(context.Background(), llamastackclient.ChatCompletionNewParams{
+		Messages: []llamastackclient.ChatCompletionNewParamsMessageUnion{{
+			OfUser: &llamastackclient.ChatCompletionNewParamsMessageUser{
+				Content: llamastackclient.ChatCompletionNewParamsMessageUserContentUnion{
+					OfString: llamastackclient.String("string"),
+				},
+			},
+		}},
+		Model: "model",
+	})
 	if err == nil {
 		t.Error("Expected there to be a cancel error")
 	}
@@ -124,7 +151,16 @@ func TestOverwriteRetryCountHeader(t *testing.T) {
 		}),
 		option.WithHeader("X-Stainless-Retry-Count", "42"),
 	)
-	_, err := client.Toolgroups.List(context.Background())
+	_, err := client.Chat.Completions.New(context.Background(), llamastackclient.ChatCompletionNewParams{
+		Messages: []llamastackclient.ChatCompletionNewParamsMessageUnion{{
+			OfUser: &llamastackclient.ChatCompletionNewParamsMessageUser{
+				Content: llamastackclient.ChatCompletionNewParamsMessageUserContentUnion{
+					OfString: llamastackclient.String("string"),
+				},
+			},
+		}},
+		Model: "model",
+	})
 	if err == nil {
 		t.Error("Expected there to be a cancel error")
 	}
@@ -152,7 +188,16 @@ func TestRetryAfterMs(t *testing.T) {
 			},
 		}),
 	)
-	_, err := client.Toolgroups.List(context.Background())
+	_, err := client.Chat.Completions.New(context.Background(), llamastackclient.ChatCompletionNewParams{
+		Messages: []llamastackclient.ChatCompletionNewParamsMessageUnion{{
+			OfUser: &llamastackclient.ChatCompletionNewParamsMessageUser{
+				Content: llamastackclient.ChatCompletionNewParamsMessageUserContentUnion{
+					OfString: llamastackclient.String("string"),
+				},
+			},
+		}},
+		Model: "model",
+	})
 	if err == nil {
 		t.Error("Expected there to be a cancel error")
 	}
@@ -174,7 +219,16 @@ func TestContextCancel(t *testing.T) {
 	)
 	cancelCtx, cancel := context.WithCancel(context.Background())
 	cancel()
-	_, err := client.Toolgroups.List(cancelCtx)
+	_, err := client.Chat.Completions.New(cancelCtx, llamastackclient.ChatCompletionNewParams{
+		Messages: []llamastackclient.ChatCompletionNewParamsMessageUnion{{
+			OfUser: &llamastackclient.ChatCompletionNewParamsMessageUser{
+				Content: llamastackclient.ChatCompletionNewParamsMessageUserContentUnion{
+					OfString: llamastackclient.String("string"),
+				},
+			},
+		}},
+		Model: "model",
+	})
 	if err == nil {
 		t.Error("Expected there to be a cancel error")
 	}
@@ -193,7 +247,16 @@ func TestContextCancelDelay(t *testing.T) {
 	)
 	cancelCtx, cancel := context.WithTimeout(context.Background(), 2 * time.Millisecond)
 	defer cancel()
-	_, err := client.Toolgroups.List(cancelCtx)
+	_, err := client.Chat.Completions.New(cancelCtx, llamastackclient.ChatCompletionNewParams{
+		Messages: []llamastackclient.ChatCompletionNewParamsMessageUnion{{
+			OfUser: &llamastackclient.ChatCompletionNewParamsMessageUser{
+				Content: llamastackclient.ChatCompletionNewParamsMessageUserContentUnion{
+					OfString: llamastackclient.String("string"),
+				},
+			},
+		}},
+		Model: "model",
+	})
 	if err == nil {
 		t.Error("expected there to be a cancel error")
 	}
@@ -218,7 +281,16 @@ func TestContextDeadline(t *testing.T) {
 			},
 		}),
 	)
-	_, err := client.Toolgroups.List(deadlineCtx)
+	_, err := client.Chat.Completions.New(deadlineCtx, llamastackclient.ChatCompletionNewParams{
+		Messages: []llamastackclient.ChatCompletionNewParamsMessageUnion{{
+			OfUser: &llamastackclient.ChatCompletionNewParamsMessageUser{
+				Content: llamastackclient.ChatCompletionNewParamsMessageUserContentUnion{
+					OfString: llamastackclient.String("string"),
+				},
+			},
+		}},
+		Model: "model",
+	})
 	if err == nil {
 		t.Error("expected there to be a deadline error")
 	}
@@ -262,10 +334,14 @@ func TestContextDeadlineStreaming(t *testing.T) {
 			},
 		}),
 	)
-	stream := client.Responses.NewStreaming(deadlineCtx, llamastackclient.ResponseNewParams{
-		Input: llamastackclient.ResponseNewParamsInputUnion{
-			OfString: llamastackclient.String("string"),
-		},
+	stream := client.Chat.Completions.NewStreaming(deadlineCtx, llamastackclient.ChatCompletionNewParams{
+		Messages: []llamastackclient.ChatCompletionNewParamsMessageUnion{{
+			OfUser: &llamastackclient.ChatCompletionNewParamsMessageUser{
+				Content: llamastackclient.ChatCompletionNewParamsMessageUserContentUnion{
+					OfString: llamastackclient.String("string"),
+				},
+			},
+		}},
 		Model: "model",
 	})
 	for stream.Next() {
@@ -311,12 +387,16 @@ func TestContextDeadlineStreamingWithRequestTimeout(t *testing.T) {
 			},
 		}),
 	)
-	stream := client.Responses.NewStreaming(
+	stream := client.Chat.Completions.NewStreaming(
 		context.Background(),
-		llamastackclient.ResponseNewParams{
-			Input: llamastackclient.ResponseNewParamsInputUnion{
-				OfString: llamastackclient.String("string"),
-			},
+		llamastackclient.ChatCompletionNewParams{
+			Messages: []llamastackclient.ChatCompletionNewParamsMessageUnion{{
+				OfUser: &llamastackclient.ChatCompletionNewParamsMessageUser{
+					Content: llamastackclient.ChatCompletionNewParamsMessageUserContentUnion{
+						OfString: llamastackclient.String("string"),
+					},
+				},
+			}},
 			Model: "model",
 		},
 		option.WithRequestTimeout((100 * time.Millisecond)),
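
A side note for readers of this change: below is a minimal, self-contained sketch of the request shape these tests now exercise. The `ChatCompletionNewParams` literal and the `client.Chat.Completions.New` call are copied from the diff; the module path (`github.com/llamastack/llama-stack-client-go`), the `NewClient` constructor, and `option.WithBaseURL` are assumptions based on the SDK's usual layout and are not part of this commit.

```go
package main

import (
	"context"
	"fmt"

	llamastackclient "github.com/llamastack/llama-stack-client-go" // assumed module path
	"github.com/llamastack/llama-stack-client-go/option"           // assumed option package
)

func main() {
	// Assumed constructor and base URL; the diff only shows the call sites inside the tests.
	client := llamastackclient.NewClient(
		option.WithBaseURL("http://localhost:4010"),
	)

	// The same chat-completion params literal used across the updated tests:
	// a single user message with plain-string content, plus a model name.
	params := llamastackclient.ChatCompletionNewParams{
		Messages: []llamastackclient.ChatCompletionNewParamsMessageUnion{{
			OfUser: &llamastackclient.ChatCompletionNewParamsMessageUser{
				Content: llamastackclient.ChatCompletionNewParamsMessageUserContentUnion{
					OfString: llamastackclient.String("string"),
				},
			},
		}},
		Model: "model",
	}

	completion, err := client.Chat.Completions.New(context.Background(), params)
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	fmt.Printf("%+v\n", completion)
}
```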