google-gemini-2.5-pro.yaml

---
version: 2
interactions:
- id: 0
  request:
    proto: HTTP/1.1
    proto_major: 1
    proto_minor: 1
    content_length: 180
    host: generativelanguage.googleapis.com
    body: "{\"contents\":[{\"parts\":[{\"text\":\"Say hi in Portuguese\"}],\"role\":\"user\"}],\"generationConfig\":{},\"systemInstruction\":{\"parts\":[{\"text\":\"You are a helpful assistant\"}],\"role\":\"user\"}}\n"
    headers:
      Content-Type:
      - application/json
      User-Agent:
      - google-genai-sdk/1.23.0 gl-go/go1.24.5
    url: https://generativelanguage.googleapis.com/v1beta/models/gemini-2.5-flash:generateContent
    method: POST
  response:
    proto: HTTP/2.0
    proto_major: 2
    proto_minor: 0
    content_length: -1
    uncompressed: true
    body: "{\n  \"candidates\": [\n    {\n      \"content\": {\n        \"parts\": [\n          {\n            \"text\": \"Olá!\\n\\nIn Portuguese, \\\"hi\\\" can be translated as:\\n\\n*   **Oi** (very common and informal)\\n*   **Olá** (a bit more formal, but also widely used)\"\n          }\n        ],\n        \"role\": \"model\"\n      },\n      \"finishReason\": \"STOP\",\n      \"index\": 0\n    }\n  ],\n  \"usageMetadata\": {\n    \"promptTokenCount\": 11,\n    \"candidatesTokenCount\": 43,\n    \"totalTokenCount\": 77,\n    \"promptTokensDetails\": [\n      {\n        \"modality\": \"TEXT\",\n        \"tokenCount\": 11\n      }\n    ],\n    \"thoughtsTokenCount\": 23\n  },\n  \"modelVersion\": \"gemini-2.5-flash\",\n  \"responseId\": \"_Ui7aL_qEoCsz7IPmMvIqQ4\"\n}\n"
    headers:
      Content-Type:
      - application/json; charset=UTF-8
    status: 200 OK
    code: 200
    duration: 870.503208ms
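
For context, a minimal sketch of the kind of client call that could produce the request recorded above, assuming the google.golang.org/genai Go SDK (inferred from the google-genai-sdk/1.23.0 gl-go/go1.24.5 User-Agent). The environment variable name and error handling are illustrative and not taken from the cassette.

package main

import (
	"context"
	"fmt"
	"log"
	"os"

	"google.golang.org/genai"
)

func main() {
	ctx := context.Background()

	// Assumption: the API key comes from an environment variable; the
	// cassette does not record credentials.
	client, err := genai.NewClient(ctx, &genai.ClientConfig{
		APIKey:  os.Getenv("GEMINI_API_KEY"),
		Backend: genai.BackendGeminiAPI,
	})
	if err != nil {
		log.Fatal(err)
	}

	// Mirrors the recorded request body: one user message, a
	// systemInstruction block, and an empty generationConfig.
	resp, err := client.Models.GenerateContent(ctx,
		"gemini-2.5-flash",
		genai.Text("Say hi in Portuguese"),
		&genai.GenerateContentConfig{
			SystemInstruction: &genai.Content{
				Role:  "user",
				Parts: []*genai.Part{{Text: "You are a helpful assistant"}},
			},
		},
	)
	if err != nil {
		log.Fatal(err)
	}

	// Prints the candidate text recorded in the response body ("Olá! ...").
	fmt.Println(resp.Text())
}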