tool.yaml

---
version: 2
interactions:
- id: 0
  request:
    proto: HTTP/1.1
    proto_major: 1
    proto_minor: 1
    content_length: 471
    host: ""
    body: '{"messages":[{"content":"You are a helpful assistant","role":"system"},{"content":"What''s the weather in Florence,Italy?","role":"user"}],"model":"openai/gpt-oss-20b","max_completion_tokens":4000,"tool_choice":"auto","tools":[{"function":{"name":"weather","strict":false,"description":"Get weather information for a location","parameters":{"properties":{"location":{"description":"the city","type":"string"}},"required":["location"],"type":"object"}},"type":"function"}]}'
    headers:
      Accept:
      - application/json
      Content-Type:
      - application/json
      User-Agent:
      - OpenAI/Go 2.7.1
    url: http://localhost:8080/v1/chat/completions
    method: POST
  response:
    proto: HTTP/1.1
    proto_major: 1
    proto_minor: 1
    content_length: 803
    body: '{"choices":[{"finish_reason":"tool_calls","index":0,"message":{"role":"assistant","reasoning_content":"We need to call the weather function.","content":null,"tool_calls":[{"type":"function","function":{"name":"weather","arguments":"{\"location\":\"Florence, Italy\"}"},"id":"iSI9CucmKnbtrkxFldJPg3gSXbJ3Eom9"}]}}],"created":1761672366,"model":"openai/gpt-oss-20b","system_fingerprint":"b6865-1c1409e13","object":"chat.completion","usage":{"completion_tokens":34,"prompt_tokens":139,"total_tokens":173},"id":"chatcmpl-5ubBAQ1OuDgakYNl3FcW2ZzxP6PLPfv4","timings":{"cache_n":59,"prompt_n":80,"prompt_ms":292.99,"prompt_per_token_ms":3.662375,"prompt_per_second":273.0468616676337,"predicted_n":34,"predicted_ms":462.897,"predicted_per_token_ms":13.614617647058823,"predicted_per_second":73.45046522228488}}'
    headers:
      Content-Type:
      - application/json; charset=utf-8
    status: 200 OK
    code: 200
    duration: 760.630834ms
- id: 1
  request:
    proto: HTTP/1.1
    proto_major: 1
    proto_minor: 1
    content_length: 729
    host: ""
    body: '{"messages":[{"content":"You are a helpful assistant","role":"system"},{"content":"What''s the weather in Florence,Italy?","role":"user"},{"tool_calls":[{"id":"iSI9CucmKnbtrkxFldJPg3gSXbJ3Eom9","function":{"arguments":"{\"location\":\"Florence, Italy\"}","name":"weather"},"type":"function"}],"role":"assistant"},{"content":"40 C","tool_call_id":"iSI9CucmKnbtrkxFldJPg3gSXbJ3Eom9","role":"tool"}],"model":"openai/gpt-oss-20b","max_completion_tokens":4000,"tool_choice":"auto","tools":[{"function":{"name":"weather","strict":false,"description":"Get weather information for a location","parameters":{"properties":{"location":{"description":"the city","type":"string"}},"required":["location"],"type":"object"}},"type":"function"}]}'
    headers:
      Accept:
      - application/json
      Content-Type:
      - application/json
      User-Agent:
      - OpenAI/Go 2.7.1
    url: http://localhost:8080/v1/chat/completions
    method: POST
  response:
    proto: HTTP/1.1
    proto_major: 1
    proto_minor: 1
    content_length: 1240
    body: '{"choices":[{"finish_reason":"stop","index":0,"message":{"role":"assistant","reasoning_content":"We have a response from the weather function: \"40 C\". The user asked: \"What''s the weather in Florence, Italy?\" The function returned \"40 C\". This is presumably the temperature. The assistant should respond accordingly, maybe confirm the weather. Also check if any other details needed. The user just asked for weather. So answer: \"The current temperature is 40°C.\" Maybe add a note that it''s high. Also can ask if they want more info. Probably just reply with the info.","content":"The current temperature in Florence, Italy is **40 °C**. If you need more details—like humidity, wind speed, or forecast—just let me know!"}}],"created":1761672368,"model":"openai/gpt-oss-20b","system_fingerprint":"b6865-1c1409e13","object":"chat.completion","usage":{"completion_tokens":145,"prompt_tokens":175,"total_tokens":320},"id":"chatcmpl-FBssXUKqLhrUJRe6LEAGJVRpBGrzYITs","timings":{"cache_n":139,"prompt_n":36,"prompt_ms":147.533,"prompt_per_token_ms":4.098138888888888,"prompt_per_second":244.0132038255848,"predicted_n":145,"predicted_ms":2037.608,"predicted_per_token_ms":14.05246896551724,"predicted_per_second":71.16187215597897}}'
    headers:
      Content-Type:
      - application/json; charset=utf-8
    status: 200 OK
    code: 200
    duration: 2.186660958s
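
The cassette records a complete tool-calling round trip: interaction 0 sends the chat request with the weather tool definition and gets back finish_reason "tool_calls", and interaction 1 sends the tool result ("40 C") and gets the final assistant answer. The following is a minimal sketch, not the test code behind the recording, showing how the first request could be reproduced with only the Go standard library; it assumes an OpenAI-compatible server (such as the llama.cpp instance visible in the recording) is still listening on http://localhost:8080. The cassette itself was recorded through the OpenAI Go client (User-Agent OpenAI/Go 2.7.1).

package main

import (
	"bytes"
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Same payload as interaction id 0: a chat completion request that
	// advertises the "weather" tool and lets the model decide to call it.
	body := []byte(`{
	  "messages": [
	    {"role": "system", "content": "You are a helpful assistant"},
	    {"role": "user", "content": "What's the weather in Florence,Italy?"}
	  ],
	  "model": "openai/gpt-oss-20b",
	  "max_completion_tokens": 4000,
	  "tool_choice": "auto",
	  "tools": [{
	    "type": "function",
	    "function": {
	      "name": "weather",
	      "strict": false,
	      "description": "Get weather information for a location",
	      "parameters": {
	        "type": "object",
	        "properties": {"location": {"type": "string", "description": "the city"}},
	        "required": ["location"]
	      }
	    }
	  }]
	}`)

	resp, err := http.Post("http://localhost:8080/v1/chat/completions",
		"application/json", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// The recorded response carries a tool_calls finish reason plus the
	// weather call and its arguments; a real client would execute the tool
	// and issue the follow-up request captured as interaction id 1.
	out, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status)
	fmt.Println(string(out))
}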