---
version: 2
interactions:
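# Interaction 0: the user asks for the weather in Florence; the model returns a
# thought summary part plus a functionCall to the declared `weather` tool.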
- id: 0
  request:
    proto: HTTP/1.1
    proto_major: 1
    proto_minor: 1
    content_length: 550
    host: generativelanguage.googleapis.com
    body: |
      {"contents":[{"parts":[{"text":"What's the weather in Florence, Italy?"}],"role":"user"}],"generationConfig":{"thinkingConfig":{"includeThoughts":true,"thinkingBudget":128}},"systemInstruction":{"parts":[{"text":"You are a helpful assistant"}],"role":"user"},"toolConfig":{"functionCallingConfig":{"mode":"AUTO"}},"tools":[{"functionDeclarations":[{"description":"Get weather information for a location","name":"weather","parameters":{"properties":{"location":{"description":"the city","type":"STRING"}},"required":["location"],"type":"OBJECT"}}]}]}
    headers:
      Content-Type:
      - application/json
      User-Agent:
      - google-genai-sdk/1.23.0 gl-go/go1.25.1
    url: https://generativelanguage.googleapis.com/v1beta/models/gemini-2.5-flash:generateContent
    method: POST
  response:
    proto: HTTP/2.0
    proto_major: 2
    proto_minor: 0
    content_length: -1
    uncompressed: true
    body: |
      {
        "candidates": [
          {
            "content": {
              "parts": [
                {
                  "text": "**Alright, here's what I'm thinking.**\n\nThe user wants the weather, simple enough. They've specified \"Florence, Italy\". My internal processing is telling me I have a `weather` tool that's specifically designed for this sort of query. It needs a `location` argument, and \"Florence, Italy\" is *perfect* for that. So, I should just feed \"Florence, Italy\" directly into the `weather` tool's `location` parameter. That's it, should be a quick and easy retrieval.\n",
                  "thought": true
                },
                {
                  "functionCall": {
                    "name": "weather",
                    "args": {
                      "location": "Florence, Italy"
                    }
                  },
                  "thoughtSignature": "Cv0BAdHtim/faT2T5UU6b+75rrcyw9ZzINxOjX23TIc40xJ+vXIv0bdgEIs1AckES34/063nRHBt7/gjtCTI+I/EQvdFAeW8wmtXO0Bp7Vqyineg3rw9u9W4E//mEjfLqcP9EiYi4HER9Pwj+iASiRpZQ0l/hJ32cSCzFVLVgVPQFRlWOrl7AbqZKRZDx1u72L6+3VBhXTGAB+i8m4CK0s1HY4/apyoOcfHVKJyNviC9kXRUhGstF6LO1Gwhz4ckVPHrVOMUCbEDkdcqR4zkCm1R/x6r+9nXwJKoKli5BDcQ7LssCBal009/HaRMJxVnFOLKi5NgSUFqf5SBmZ006g=="
                }
              ],
              "role": "model"
            },
            "finishReason": "STOP",
            "index": 0
          }
        ],
        "usageMetadata": {
          "promptTokenCount": 54,
          "candidatesTokenCount": 15,
          "totalTokenCount": 120,
          "promptTokensDetails": [
            {
              "modality": "TEXT",
              "tokenCount": 54
            }
          ],
          "thoughtsTokenCount": 51
        },
        "modelVersion": "gemini-2.5-flash",
        "responseId": "FHPWaKjHEu-C7M8P4Yuh8QY"
      }
    headers:
      Content-Type:
      - application/json; charset=UTF-8
    status: 200 OK
    code: 200
    duration: 2.220739041s
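# Interaction 1: the functionResponse ("40 C") is sent back to the model, which
# then produces the final natural-language answer.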
- id: 1
  request:
    proto: HTTP/1.1
    proto_major: 1
    proto_minor: 1
    content_length: 776
    host: generativelanguage.googleapis.com
    body: |
      {"contents":[{"parts":[{"text":"What's the weather in Florence, Italy?"}],"role":"user"},{"parts":[{"functionCall":{"args":{"location":"Florence, Italy"},"id":"weather","name":"weather"}}],"role":"model"},{"parts":[{"functionResponse":{"id":"weather","name":"weather","response":{"result":"40 C"}}}],"role":"user"}],"generationConfig":{"thinkingConfig":{"includeThoughts":true,"thinkingBudget":128}},"systemInstruction":{"parts":[{"text":"You are a helpful assistant"}],"role":"user"},"toolConfig":{"functionCallingConfig":{"mode":"AUTO"}},"tools":[{"functionDeclarations":[{"description":"Get weather information for a location","name":"weather","parameters":{"properties":{"location":{"description":"the city","type":"STRING"}},"required":["location"],"type":"OBJECT"}}]}]}
    headers:
      Content-Type:
      - application/json
      User-Agent:
      - google-genai-sdk/1.23.0 gl-go/go1.25.1
    url: https://generativelanguage.googleapis.com/v1beta/models/gemini-2.5-flash:generateContent
    method: POST
  response:
    proto: HTTP/2.0
    proto_major: 2
    proto_minor: 0
    content_length: -1
    uncompressed: true
    body: |
      {
        "candidates": [
          {
            "content": {
              "parts": [
                {
                  "text": "The weather in Florence, Italy is 40 degrees Celsius."
                }
              ],
              "role": "model"
            },
            "finishReason": "STOP",
            "index": 0
          }
        ],
        "usageMetadata": {
          "promptTokenCount": 84,
          "candidatesTokenCount": 13,
          "totalTokenCount": 97,
          "promptTokensDetails": [
            {
              "modality": "TEXT",
              "tokenCount": 84
            }
          ]
        },
        "modelVersion": "gemini-2.5-flash",
        "responseId": "FHPWaL-mJ4X9nsEP8NW9qQc"
      }
    headers:
      Content-Type:
      - application/json; charset=UTF-8
    status: 200 OK
    code: 200
    duration: 338.789083ms