@@ -4,10 +4,8 @@ import (
 	"context"
 	"encoding/json"
 	"os"
-	"strings"
 	"testing"
 
-	"github.com/gptscript-ai/gptscript/pkg/openai"
 	"github.com/gptscript-ai/gptscript/pkg/tests/tester"
 	"github.com/gptscript-ai/gptscript/pkg/types"
 	"github.com/hexops/autogold/v2"
@@ -220,21 +218,21 @@ func TestSubChat(t *testing.T) {
 	resp, err := r.Chat(context.Background(), nil, prg, os.Environ(), "Hello")
 	require.NoError(t, err)
 
-	autogold.Expect(strings.ReplaceAll(`{
+	autogold.Expect(`{
   "done": false,
   "content": "Assistant 1",
-  "toolID": "testdata/TestSubChat/test.gpt:6",
+  "toolID": "testdata/TestSubChat/test.gpt:chatbot",
   "state": {
     "continuation": {
       "state": {
         "input": "Hello",
         "completion": {
-          "Model": "MODEL",
+          "Model": "gpt-4o",
           "InternalSystemPrompt": null,
           "Tools": [
             {
               "function": {
-                "toolID": "testdata/TestSubChat/test.gpt:6",
+                "toolID": "testdata/TestSubChat/test.gpt:chatbot",
                 "name": "chatbot",
                 "parameters": null
               }
@@ -293,19 +291,19 @@ func TestSubChat(t *testing.T) {
       },
       "calls": {
         "call_1": {
-          "toolID": "testdata/TestSubChat/test.gpt:6"
+          "toolID": "testdata/TestSubChat/test.gpt:chatbot"
         }
       }
     },
     "subCalls": [
       {
-        "toolId": "testdata/TestSubChat/test.gpt:6",
+        "toolId": "testdata/TestSubChat/test.gpt:chatbot",
         "callId": "call_1",
         "state": {
           "continuation": {
             "state": {
               "completion": {
-                "Model": "MODEL",
+                "Model": "gpt-4o",
                 "InternalSystemPrompt": false,
                 "Tools": null,
                 "Messages": [
@@ -337,32 +335,32 @@ func TestSubChat(t *testing.T) {
             },
             "result": "Assistant 1"
           },
-          "continuationToolID": "testdata/TestSubChat/test.gpt:6"
+          "continuationToolID": "testdata/TestSubChat/test.gpt:chatbot"
         }
       }
     ],
     "subCallID": "call_1"
   }
-}`, "MODEL", openai.DefaultModel)).Equal(t, toJSONString(t, resp))
+}`).Equal(t, toJSONString(t, resp))
 
 	resp, err = r.Chat(context.Background(), resp.State, prg, os.Environ(), "User 1")
 	require.NoError(t, err)
 
-	autogold.Expect(strings.ReplaceAll(`{
+	autogold.Expect(`{
   "done": false,
   "content": "Assistant 2",
-  "toolID": "testdata/TestSubChat/test.gpt:6",
+  "toolID": "testdata/TestSubChat/test.gpt:chatbot",
   "state": {
     "continuation": {
       "state": {
         "input": "Hello",
         "completion": {
-          "Model": "MODEL",
+          "Model": "gpt-4o",
           "InternalSystemPrompt": null,
           "Tools": [
             {
               "function": {
-                "toolID": "testdata/TestSubChat/test.gpt:6",
+                "toolID": "testdata/TestSubChat/test.gpt:chatbot",
                 "name": "chatbot",
                 "parameters": null
               }
@@ -421,19 +419,19 @@ func TestSubChat(t *testing.T) {
       },
      "calls": {
        "call_1": {
-          "toolID": "testdata/TestSubChat/test.gpt:6"
+          "toolID": "testdata/TestSubChat/test.gpt:chatbot"
        }
      }
    },
    "subCalls": [
      {
-        "toolId": "testdata/TestSubChat/test.gpt:6",
+        "toolId": "testdata/TestSubChat/test.gpt:chatbot",
        "callId": "call_1",
        "state": {
          "continuation": {
            "state": {
              "completion": {
-                "Model": "MODEL",
+                "Model": "gpt-4o",
                "InternalSystemPrompt": false,
                "Tools": null,
                "Messages": [
@@ -483,13 +481,13 @@ func TestSubChat(t *testing.T) {
             },
             "result": "Assistant 2"
           },
-          "continuationToolID": "testdata/TestSubChat/test.gpt:6"
+          "continuationToolID": "testdata/TestSubChat/test.gpt:chatbot"
         }
       }
     ],
     "subCallID": "call_1"
   }
-}`, "MODEL", openai.DefaultModel)).Equal(t, toJSONString(t, resp))
+}`).Equal(t, toJSONString(t, resp))
 }
 
 func TestChat(t *testing.T) {
@@ -506,16 +504,16 @@ func TestChat(t *testing.T) {
 	resp, err := r.Chat(context.Background(), nil, prg, os.Environ(), "Hello")
 	require.NoError(t, err)
 
-	autogold.Expect(strings.ReplaceAll(`{
+	autogold.Expect(`{
   "done": false,
   "content": "Assistant 1",
-  "toolID": "testdata/TestChat/test.gpt:1",
+  "toolID": "testdata/TestChat/test.gpt:",
   "state": {
     "continuation": {
       "state": {
         "input": "Hello",
         "completion": {
-          "Model": "MODEL",
+          "Model": "gpt-4o",
           "InternalSystemPrompt": false,
           "Tools": null,
           "Messages": [
@@ -556,23 +554,23 @@ func TestChat(t *testing.T) {
       },
       "result": "Assistant 1"
     },
-    "continuationToolID": "testdata/TestChat/test.gpt:1"
+    "continuationToolID": "testdata/TestChat/test.gpt:"
   }
-}`, "MODEL", openai.DefaultModel)).Equal(t, toJSONString(t, resp))
+}`).Equal(t, toJSONString(t, resp))
 
 	resp, err = r.Chat(context.Background(), resp.State, prg, os.Environ(), "User 1")
 	require.NoError(t, err)
 
-	autogold.Expect(strings.ReplaceAll(`{
+	autogold.Expect(`{
   "done": false,
   "content": "Assistant 2",
-  "toolID": "testdata/TestChat/test.gpt:1",
+  "toolID": "testdata/TestChat/test.gpt:",
   "state": {
     "continuation": {
       "state": {
         "input": "Hello",
         "completion": {
-          "Model": "MODEL",
+          "Model": "gpt-4o",
           "InternalSystemPrompt": false,
           "Tools": null,
           "Messages": [
@@ -631,9 +629,9 @@ func TestChat(t *testing.T) {
       },
       "result": "Assistant 2"
     },
-    "continuationToolID": "testdata/TestChat/test.gpt:1"
+    "continuationToolID": "testdata/TestChat/test.gpt:"
   }
-}`, "MODEL", openai.DefaultModel)).Equal(t, toJSONString(t, resp))
+}`).Equal(t, toJSONString(t, resp))
 }
 
 func TestChatRunNoError(t *testing.T) {