tool_zai-glm-4.5.yaml

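# Recorded HTTP cassette (go-vcr-style, schema version 2) capturing a tool-calling
# round trip against the z.ai GLM-4.5 chat completions endpoint.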
---
version: 2
interactions:
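  # Interaction 0: the user asks for the weather in Florence, Italy; the model
  # responds with a single call to the "weather" tool.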
  - id: 0
    request:
        proto: HTTP/1.1
        proto_major: 1
        proto_minor: 1
        content_length: 449
        host: ""
        body: '{"messages":[{"content":"You are a helpful assistant","role":"system"},{"content":"What''s the weather in Florence,Italy?","role":"user"}],"model":"glm-4.5","max_tokens":4000,"tool_choice":"auto","tools":[{"function":{"name":"weather","strict":false,"description":"Get weather information for a location","parameters":{"properties":{"location":{"description":"the city","type":"string"}},"required":["location"],"type":"object"}},"type":"function"}]}'
        headers:
            Accept:
              - application/json
            Content-Type:
              - application/json
            User-Agent:
              - OpenAI/Go 2.3.0
        url: https://api.z.ai/api/coding/paas/v4/chat/completions
        method: POST
    response:
        proto: HTTP/2.0
        proto_major: 2
        proto_minor: 0
        content_length: -1
        uncompressed: true
        body: '{"choices":[{"finish_reason":"tool_calls","index":0,"message":{"content":"\n\nI''ll check the weather in Florence, Italy for you.\n","reasoning_content":"The user is asking for weather information for Florence, Italy. I have access to a weather function that requires a \"location\" parameter. The user has provided \"Florence,Italy\" as the location. I should use this exact value as provided by the user for the location parameter.","role":"assistant","tool_calls":[{"function":{"arguments":"{\"location\": \"Florence,Italy\"}","name":"weather"},"id":"call_-8307219707908354744","index":0,"type":"function"}]}}],"created":1758795556,"id":"20250925181915c06ae89914d14803","model":"glm-4.5","request_id":"20250925181915c06ae89914d14803","usage":{"completion_tokens":89,"prompt_tokens":179,"prompt_tokens_details":{"cached_tokens":43},"total_tokens":268}}'
        headers:
            Content-Type:
              - application/json; charset=UTF-8
        status: 200 OK
        code: 200
        duration: 2.310922292s
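  # Interaction 1: the conversation is replayed with the tool result ("40 C")
  # appended; the model returns its final natural-language answer.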
  - id: 1
    request:
        proto: HTTP/1.1
        proto_major: 1
        proto_minor: 1
        content_length: 762
        host: ""
        body: '{"messages":[{"content":"You are a helpful assistant","role":"system"},{"content":"What''s the weather in Florence,Italy?","role":"user"},{"content":"\n\nI''ll check the weather in Florence, Italy for you.\n","tool_calls":[{"id":"call_-8307219707908354744","function":{"arguments":"{\"location\": \"Florence,Italy\"}","name":"weather"},"type":"function"}],"role":"assistant"},{"content":"40 C","tool_call_id":"call_-8307219707908354744","role":"tool"}],"model":"glm-4.5","max_tokens":4000,"tool_choice":"auto","tools":[{"function":{"name":"weather","strict":false,"description":"Get weather information for a location","parameters":{"properties":{"location":{"description":"the city","type":"string"}},"required":["location"],"type":"object"}},"type":"function"}]}'
        headers:
            Accept:
              - application/json
            Content-Type:
              - application/json
            User-Agent:
              - OpenAI/Go 2.3.0
        url: https://api.z.ai/api/coding/paas/v4/chat/completions
        method: POST
    response:
        proto: HTTP/2.0
        proto_major: 2
        proto_minor: 0
        content_length: -1
        uncompressed: true
        body: '{"choices":[{"finish_reason":"stop","index":0,"message":{"content":"\n\nThe weather in Florence, Italy is currently 40°C. That''s quite hot!","reasoning_content":"The weather function returned a result showing 40 C for Florence, Italy. This seems like a very high temperature, but I should report the result as provided by the function.","role":"assistant"}}],"created":1758795558,"id":"20250925181917fb13220f9015421a","model":"glm-4.5","request_id":"20250925181917fb13220f9015421a","usage":{"completion_tokens":57,"prompt_tokens":219,"prompt_tokens_details":{"cached_tokens":43},"total_tokens":276}}'
        headers:
            Content-Type:
              - application/json; charset=UTF-8
        status: 200 OK
        code: 200
        duration: 1.946890958s