1
0
Fork 0
mirror of https://github.com/maybe-finance/maybe.git synced 2025-08-10 07:55:21 +02:00

Consolidate and simplify OpenAI provider and provider concepts

This commit is contained in:
Zach Gollwitzer 2025-03-29 10:21:51 -04:00
parent b9681d9cee
commit 57023fdc85
16 changed files with 270 additions and 517 deletions

View file

@ -1,6 +1,8 @@
module Provider::ExchangeRateProvider
module Provider::ExchangeRateConcept
extend ActiveSupport::Concern
Rate = Data.define(:date, :from, :to, :rate)
def fetch_exchange_rate(from:, to:, date:)
raise NotImplementedError, "Subclasses must implement #fetch_exchange_rate"
end
@ -8,7 +10,4 @@ module Provider::ExchangeRateProvider
def fetch_exchange_rates(from:, to:, start_date:, end_date:)
raise NotImplementedError, "Subclasses must implement #fetch_exchange_rates"
end
private
Rate = Data.define(:date, :from, :to, :rate)
end

View file

@ -0,0 +1,12 @@
# Interface ("concept") mixed into providers that can serve LLM chat responses.
module Provider::LlmConcept
  extend ActiveSupport::Concern

  # Provider-agnostic value objects returned to callers of the LLM interface.
  ChatMessage         = Data.define(:id, :output_text)
  ChatStreamChunk     = Data.define(:type, :data)
  ChatFunctionRequest = Data.define(:id, :call_id, :function_name, :function_args)
  ChatResponse        = Data.define(:id, :model, :messages, :function_requests)

  # Contract method — concrete providers must override this.
  def chat_response(prompt, model:, instructions: nil, functions: [], function_results: [], streamer: nil, previous_response_id: nil)
    raise NotImplementedError, "Subclasses must implement #chat_response"
  end
end

View file

@ -1,13 +0,0 @@
# Legacy LLM provider interface (this file is deleted by this commit in
# favor of Provider::LlmConcept).
module Provider::LlmProvider
extend ActiveSupport::Concern
# Contract method — concrete providers (e.g. Provider::Openai) must override.
def chat_response(prompt, model:, instructions: nil, functions: [], function_results: [], streamer: nil, previous_response_id: nil)
raise NotImplementedError, "Subclasses must implement #chat_response"
end
private
# NOTE(review): `private` does not apply to constant assignments in Ruby, so
# these Data classes remain reachable as e.g. Provider::LlmProvider::Message —
# and other files in this commit do reference them that way.
Message = Data.define(:id, :output_text)
StreamChunk = Data.define(:type, :data)
ChatResponse = Data.define(:id, :model, :messages, :function_requests)
FunctionRequest = Data.define(:id, :call_id, :function_name, :function_args)
end

View file

@ -1,5 +1,5 @@
class Provider::Openai < Provider
include LlmProvider, Parser
include LlmConcept
# Subclass so errors caught in this provider are raised as Provider::Openai::Error
Error = Class.new(Provider::Error)
@ -16,93 +16,31 @@ class Provider::Openai < Provider
def chat_response(prompt, model:, instructions: nil, functions: [], function_results: [], streamer: nil, previous_response_id: nil)
with_provider_response do
proxy_streamer = proc do |chunk|
type = chunk.dig("type")
chat_config = ChatConfig.new(
functions: functions,
function_results: function_results
)
case type
when "response.output_text.delta", "response.refusal.delta"
streamer.call(StreamChunk.new(type: "output_text", data: chunk.dig("delta")))
when "response.completed"
raw_response = chunk.dig("response")
messages = extract_messages(raw_response).map do |message|
Message.new(
id: message[:id],
output_text: message[:output_text]
)
end
function_requests = extract_function_requests(raw_response).map do |function_request|
FunctionRequest.new(
id: function_request[:id],
call_id: function_request[:call_id],
function_name: function_request[:name],
function_args: function_request[:arguments]
)
end
response = ChatResponse.new(
id: extract_id(raw_response),
model: extract_model(raw_response),
messages: messages,
function_requests: function_requests
)
streamer.call(StreamChunk.new(type: "response", data: response))
# Proxy that converts raw stream to "LLM Provider concept" stream
stream_proxy = if streamer.present?
proc do |chunk|
parsed_chunk = ChatStreamParser.new(chunk).parsed
streamer.call(parsed_chunk) unless parsed_chunk.nil?
end
end
function_results_input = function_results.map do |fn_result|
{
type: "function_call_output",
call_id: fn_result[:provider_call_id],
output: fn_result[:result].to_json
}
end
prompt_input = [ { role: "user", content: prompt } ]
tools = functions.map do |fn|
{
type: "function",
name: fn[:name],
description: fn[:description],
parameters: fn[:params_schema],
strict: fn[:strict]
}
else
nil
end
raw_response = client.responses.create(parameters: {
model: model,
input: prompt_input + function_results_input,
input: chat_config.build_input(prompt),
instructions: instructions,
tools: tools,
tools: chat_config.tools,
previous_response_id: previous_response_id,
stream: streamer.present? ? proxy_streamer : nil
stream: stream_proxy
})
messages = extract_messages(raw_response).map do |message|
Message.new(
id: message[:id],
output_text: message[:output_text]
)
end
function_requests = extract_function_requests(raw_response).map do |function_request|
FunctionRequest.new(
id: function_request[:id],
call_id: function_request[:call_id],
function_name: function_request[:name],
function_args: function_request[:arguments]
)
end
ChatResponse.new(
id: extract_id(raw_response),
model: extract_model(raw_response),
messages: messages,
function_requests: function_requests
)
ChatParser.new(raw_response).parsed
end
end

View file

@ -0,0 +1,36 @@
# Translates generic function / function-result descriptions into the request
# shapes expected by OpenAI's Responses API.
class Provider::Openai::ChatConfig
  # functions: array of hashes with :name, :description, :params_schema, :strict
  # function_results: array of hashes with :provider_call_id and :result
  def initialize(functions: [], function_results: [])
    @functions = functions
    @function_results = function_results
  end

  # OpenAI "tools" payload — one function-tool entry per exposed function.
  def tools
    functions.map do |function|
      {
        type: "function",
        name: function[:name],
        description: function[:description],
        parameters: function[:params_schema],
        strict: function[:strict]
      }
    end
  end

  # Request "input" payload: the user prompt first, then any prior function
  # results so the model can continue the tool-call conversation.
  def build_input(prompt)
    outputs = function_results.map do |function_result|
      {
        type: "function_call_output",
        call_id: function_result[:provider_call_id],
        output: function_result[:result].to_json
      }
    end
    [ { role: "user", content: prompt }, *outputs ]
  end

  private
    attr_reader :functions, :function_results
end

View file

@ -0,0 +1,59 @@
# Normalizes a raw OpenAI Responses API response (Hash with string keys) into
# the provider-agnostic Provider::LlmConcept::ChatResponse value object.
class Provider::Openai::ChatParser
  # Kept for API compatibility with callers that rescue parser errors.
  Error = Class.new(StandardError)

  # object: the raw response Hash as returned by the OpenAI client
  def initialize(object)
    @object = object
  end

  # Returns a ChatResponse with the response id/model plus all extracted
  # messages and function requests.
  def parsed
    ChatResponse.new(
      id: response_id,
      model: response_model,
      messages: messages,
      function_requests: function_requests
    )
  end

  private
    attr_reader :object

    ChatResponse = Provider::LlmConcept::ChatResponse
    ChatMessage = Provider::LlmConcept::ChatMessage
    ChatFunctionRequest = Provider::LlmConcept::ChatFunctionRequest

    def response_id
      object.dig("id")
    end

    def response_model
      object.dig("model")
    end

    # All output items of the given type. Wrapping in Array() tolerates a
    # missing/nil "output" key (previously this raised NoMethodError), and
    # extracting the filter here removes the duplication between #messages
    # and #function_requests.
    def output_items(type)
      Array(object.dig("output")).filter { |item| item.dig("type") == type }
    end

    def messages
      output_items("message").map do |message_item|
        ChatMessage.new(
          id: message_item.dig("id"),
          # Each content part carries either text or a refusal; surface both
          # the same way, one part per line. (The former `.flatten` was a
          # no-op on an array of strings and has been dropped.)
          output_text: Array(message_item.dig("content")).map do |content|
            content.dig("text") || content.dig("refusal")
          end.join("\n")
        )
      end
    end

    def function_requests
      output_items("function_call").map do |function_item|
        ChatFunctionRequest.new(
          id: function_item.dig("id"),
          call_id: function_item.dig("call_id"),
          function_name: function_item.dig("name"),
          function_args: function_item.dig("arguments")
        )
      end
    end
end

View file

@ -1,118 +0,0 @@
# Orchestrates a two-step OpenAI chat exchange: first request may return
# function requests, which are fulfilled locally and sent back in a follow-up
# request. (This file is deleted by this commit.)
class Provider::Openai::ChatResponseProcessor
include Provider::Openai::Parser
def initialize(message:, function_caller:, client:, subscribers:, instructions: nil)
@client = client
@message = message
@instructions = instructions
@function_caller = function_caller
@streamer = build_streamer(subscribers)
end
# NOTE(review): ChatStreamer#initialize elsewhere in this codebase takes a
# single positional output_stream argument, not these keywords — confirm this
# constructor call actually matches before reusing this pattern.
def build_streamer(subscribers)
ChatStreamer.new(
client: client,
function_caller: function_caller,
subscribers: subscribers
)
end
# Runs the exchange and returns an array of one or two responses:
# [first] when no functions were requested, [first, follow_up] otherwise.
def process
raw_first_response = fetch_response(input, previous_response_id: previous_openai_response_id)
function_requests = extract_function_requests(raw_first_response)
# Fulfill each function request locally before responding to the model.
function_calls = function_requests.map do |function_request|
function_caller.fulfill_request(function_request)
end
first_response = build_response(raw_first_response, function_calls: function_calls)
if first_response.function_calls.empty?
return [ first_response ]
end
# Send the function results back; OpenAI threads the conversation via
# previous_response_id, so only the new inputs are needed.
raw_follow_up_response = fetch_response(
input + function_caller.build_results_input(function_calls),
previous_response_id: first_response.provider_id,
)
follow_up_response = build_response(raw_follow_up_response)
[ first_response, follow_up_response ]
end
private
attr_reader :client, :message, :instructions, :streamer, :function_caller
# NOTE(review): the Provider::LlmProvider module visible in this commit
# defines Message/StreamChunk/ChatResponse/FunctionRequest with different
# fields (e.g. :id, not :provider_id) and no FunctionCall constant — these
# aliases and the keywords used below look stale; verify against the
# LlmProvider version this file was written for.
StreamChunk = Provider::LlmProvider::StreamChunk
ChatResponse = Provider::LlmProvider::ChatResponse
Message = Provider::LlmProvider::Message
FunctionCall = Provider::LlmProvider::FunctionCall
Error = Provider::Openai::Error
# Normalizes a raw OpenAI response into the provider-agnostic ChatResponse.
def build_response(response, function_calls: [])
ChatResponse.new(
provider_id: extract_id(response),
model: extract_model(response),
messages: extract_messages(response).map do |msg|
Message.new(
provider_id: msg[:id],
content: msg[:output_text]
)
end,
function_calls: function_calls
)
end
# Performs one Responses API call. The commented-out block below is an
# earlier internal-streamer experiment left in place; the live code passes
# `streamer` straight through to the client.
def fetch_response(input, previous_response_id: nil)
# raw_response = nil
# internal_streamer = proc do |chunk|
# type = chunk.dig("type")
# if type == "response.completed"
# raw_response = chunk.dig("response")
# end
# if streamer.present?
# case type
# when "response.output_text.delta", "response.refusal.delta"
# # We don't distinguish between text and refusal yet, so stream both the same
# streamer.call(StreamChunk.new(provider_type: "output_text", data: chunk.dig("delta")))
# when "response.function_call_arguments.done"
# streamer.call(StreamChunk.new(provider_type: "function_request", data: chunk.dig("arguments")))
# when "response.completed"
# normalized = normalize_chat_response(chunk.dig("response"), function_results: function_results)
# streamer.call(StreamChunk.new(provider_type: "response", data: normalized))
# end
# end
# end
client.responses.create(parameters: {
model: model,
input: input,
instructions: instructions,
tools: function_caller.openai_tools,
previous_response_id: previous_response_id,
stream: streamer
})
end
def chat
message.chat
end
def model
message.ai_model
end
def previous_openai_response_id
chat.latest_assistant_response_id
end
# Since we're using OpenAI's conversation state management, all we need to pass
# to input is the user message we're currently responding to.
def input
[ { role: "user", content: message.content } ]
end
end

View file

@ -0,0 +1,28 @@
# Converts one raw OpenAI stream event into a provider-agnostic
# ChatStreamChunk. Event types we do not surface yield nil.
class Provider::Openai::ChatStreamParser
  Error = Class.new(StandardError)

  # object: a single parsed stream event Hash (string keys)
  def initialize(object)
    @object = object
  end

  # Returns a ChatStreamChunk, or nil for ignored event types.
  def parsed
    case object.dig("type")
    when "response.output_text.delta", "response.refusal.delta"
      # Text and refusal deltas are streamed identically as output text.
      Chunk.new(type: "output_text", data: object.dig("delta"))
    when "response.completed"
      # The final event carries the whole response; normalize it fully.
      Chunk.new(type: "response", data: parse_response(object.dig("response")))
    end
  end

  private
    attr_reader :object

    Chunk = Provider::LlmConcept::ChatStreamChunk

    def parse_response(response)
      Provider::Openai::ChatParser.new(response).parsed
    end
end

View file

@ -1,80 +0,0 @@
# A stream proxy for OpenAI chat responses
#
# - Consumes OpenAI stream chunks
# - Outputs generic stream chunks to a "subscriber" (e.g. `Assistant`) if subscriber is supplied
#
# (This file is deleted by this commit, replaced by ChatStreamParser.)
class Provider::Openai::ChatStreamer
include Provider::Openai::Parser
def initialize(output_stream)
@output_stream = output_stream
end
# Entry point: parse one raw chunk and forward the result, skipping
# chunk types that produce no output.
def call(chunk)
output = parse_chunk(chunk)
output_stream.call(output) unless output.nil?
end
private
attr_reader :output_stream
# NOTE(review): Provider::LlmProvider as shown in this commit defines
# StreamChunk with fields (:type, :data), yet build_chunk below passes
# `provider_type:` — verify which StreamChunk shape this file targeted.
Chunk = Provider::LlmProvider::StreamChunk
Response = Provider::LlmProvider::ChatResponse
Message = Provider::LlmProvider::Message
def parse_chunk(chunk)
type = chunk.dig("type")
case type
when "response.output_text.delta", "response.refusal.delta"
build_chunk("output_text", chunk.dig("delta"))
when "response.function_call_arguments.done"
build_chunk("function_request", chunk.dig("arguments"))
when "response.completed"
handle_response(chunk.dig("response"))
end
end
# NOTE(review): @function_caller is never assigned in this class (only
# @output_stream is set in #initialize), so this path would raise
# NoMethodError on nil — presumably dead code by the time it was deleted.
def handle_response(response)
function_requests = extract_function_requests(response)
function_calls = function_requests.map do |function_request|
@function_caller.fulfill_request(function_request)
end
normalized_response = build_response(response, function_calls: function_calls)
build_chunk("response", normalized_response)
end
def build_chunk(type, data)
Chunk.new(
provider_type: type,
data: data
)
end
def build_response(response, function_calls: [])
Response.new(
provider_id: extract_id(response),
model: extract_model(response),
messages: extract_messages(response).map do |msg|
Message.new(
provider_id: msg[:id],
content: msg[:output_text]
)
end,
function_calls: function_calls
)
end
# NOTE(review): references client/model/instructions/function_caller/streamer,
# none of which are defined in this class — unreachable leftover code.
def fetch_response(input, previous_response_id: nil)
client.responses.create(parameters: {
model: model,
input: input,
instructions: instructions,
tools: function_caller.openai_tools,
previous_response_id: previous_response_id,
stream: streamer
})
end
end

View file

@ -1,56 +0,0 @@
# Adapts app-defined function objects (responding to #name, #description,
# #params_schema, #strict_mode?, #call) to OpenAI's tool-calling protocol.
# (This file is deleted by this commit; ChatConfig replaces its role.)
class Provider::Openai::FunctionCaller
def initialize(functions)
@functions = functions
end
# OpenAI "tools" payload — one function-tool entry per exposed function.
def openai_tools
functions.map do |fn|
{
type: "function",
name: fn.name,
description: fn.description,
parameters: fn.params_schema,
strict: fn.strict_mode?
}
end
end
# Converts fulfilled function calls into "function_call_output" input items
# for the follow-up request.
def build_results_input(function_calls)
function_calls.map do |fc|
{
type: "function_call_output",
call_id: fc.provider_call_id,
output: fc.result.to_json
}
end
end
# Executes the requested function with its JSON-decoded arguments and wraps
# the outcome. Any failure is re-raised as a provider error carrying the
# function name/args for debugging.
def fulfill_request(function_request)
fn_name = function_request[:name]
fn_args = JSON.parse(function_request[:arguments])
fn = get_function(fn_name)
result = fn.call(fn_args)
# NOTE(review): Provider::LlmProvider as shown in this commit does not
# define a FunctionCall constant — confirm which version this targeted.
Provider::LlmProvider::FunctionCall.new(
provider_id: function_request[:id],
provider_call_id: function_request[:call_id],
name: fn_name,
arguments: fn_args,
result: result
)
rescue => e
fn_execution_details = {
fn_name: fn_name,
fn_args: fn_args
}
raise Provider::Openai::Error.new(e, fn_execution_details)
end
private
attr_reader :functions
def get_function(name)
functions.find { |f| f.name == name }
end
end

View file

@ -1,41 +0,0 @@
# Shared private helpers for pulling normalized data out of raw OpenAI chat
# responses (Hashes with string keys).
module Provider::Openai::Parser
  extend ActiveSupport::Concern

  private
    def extract_id(chat_response)
      chat_response.dig("id")
    end

    def extract_model(chat_response)
      chat_response.dig("model")
    end

    # Returns [{ id:, output_text: }, ...] for every "message" output item.
    def extract_messages(chat_response)
      message_items = chat_response.dig("output").filter do |item|
        item.dig("type") == "message"
      end
      message_items.map do |message_item|
        # Each content part carries either text or a refusal; join all parts
        # into a single newline-separated string.
        parts = message_item.dig("content").map do |content|
          content.dig("text") || content.dig("refusal")
        end
        {
          id: message_item.dig("id"),
          output_text: parts.flatten.join("\n")
        }
      end
    end

    # Returns [{ id:, call_id:, name:, arguments: }, ...] for every
    # "function_call" output item.
    def extract_function_requests(chat_response)
      function_items = chat_response.dig("output").filter do |item|
        item.dig("type") == "function_call"
      end
      function_items.map do |function_call|
        {
          id: function_call.dig("id"),
          call_id: function_call.dig("call_id"),
          name: function_call.dig("name"),
          arguments: function_call.dig("arguments")
        }
      end
    end
end

View file

@ -1,6 +1,10 @@
module Provider::SecurityProvider
module Provider::SecurityConcept
extend ActiveSupport::Concern
Security = Data.define(:symbol, :name, :logo_url, :exchange_operating_mic)
SecurityInfo = Data.define(:symbol, :name, :links, :logo_url, :description, :kind)
Price = Data.define(:security, :date, :price, :currency)
def search_securities(symbol, country_code: nil, exchange_operating_mic: nil)
raise NotImplementedError, "Subclasses must implement #search_securities"
end
@ -16,9 +20,4 @@ module Provider::SecurityProvider
def fetch_security_prices(security, start_date:, end_date:)
raise NotImplementedError, "Subclasses must implement #fetch_security_prices"
end
private
Security = Data.define(:symbol, :name, :logo_url, :exchange_operating_mic)
SecurityInfo = Data.define(:symbol, :name, :links, :logo_url, :description, :kind)
Price = Data.define(:security, :date, :price, :currency)
end

View file

@ -1,5 +1,5 @@
class Provider::Synth < Provider
include ExchangeRateProvider, SecurityProvider
include ExchangeRateConcept, SecurityConcept
# Subclass so errors caught in this provider are raised as Provider::Synth::Error
Error = Class.new(Provider::Error)

View file

@ -56,11 +56,22 @@ class Provider::OpenaiTest < ActiveSupport::TestCase
test "chat response with function calls" do
VCR.use_cassette("openai/chat/function_calls") do
prompt = "What is my net worth?"
functions = [
{
name: "get_net_worth",
description: "Gets a user's net worth",
params_schema: { type: "object", properties: {}, required: [], additionalProperties: false },
strict: true
}
]
first_response = @subject.chat_response(
"What is my net worth?",
prompt,
model: @subject_model,
instructions: "Use the tools available to you to answer the user's question.",
functions: [ PredictableToolFunction.new.to_h ]
functions: functions
)
assert first_response.success?
@ -70,7 +81,7 @@ class Provider::OpenaiTest < ActiveSupport::TestCase
assert function_request.present?
second_response = @subject.chat_response(
"What is my net worth?",
prompt,
model: @subject_model,
function_results: [
{
@ -78,7 +89,7 @@ class Provider::OpenaiTest < ActiveSupport::TestCase
provider_call_id: function_request.call_id,
name: function_request.function_name,
arguments: function_request.function_args,
result: PredictableToolFunction.expected_test_result
result: { amount: 10000, currency: "USD" }
}
],
previous_response_id: first_response.data.id
@ -86,24 +97,35 @@ class Provider::OpenaiTest < ActiveSupport::TestCase
assert second_response.success?
assert_equal 1, second_response.data.messages.size
assert_includes second_response.data.messages.first.output_text, PredictableToolFunction.expected_test_result
assert_includes second_response.data.messages.first.output_text, "$10,000"
end
end
test "streams chat response with tool calls" do
VCR.use_cassette("openai/chat/streaming_tool_calls", record: :all) do
VCR.use_cassette("openai/chat/streaming_tool_calls") do
collected_chunks = []
mock_streamer = proc do |chunk|
collected_chunks << chunk
end
prompt = "What is my net worth?"
functions = [
{
name: "get_net_worth",
description: "Gets a user's net worth",
params_schema: { type: "object", properties: {}, required: [], additionalProperties: false },
strict: true
}
]
# Call #1: First streaming call, will return a function request
@subject.chat_response(
"What is my net worth?",
prompt,
model: @subject_model,
instructions: "Use the tools available to you to answer the user's question.",
functions: [ PredictableToolFunction.new.to_h ],
functions: functions,
streamer: mock_streamer
)
@ -116,12 +138,12 @@ class Provider::OpenaiTest < ActiveSupport::TestCase
first_response = response_chunks.first.data
function_request = first_response.function_requests.first
# Reset collected chunks
# Reset collected chunks for the second call
collected_chunks = []
# Call #2: Second streaming call, will return a function result
@subject.chat_response(
"What is my net worth?",
prompt,
model: @subject_model,
function_results: [
{
@ -129,7 +151,7 @@ class Provider::OpenaiTest < ActiveSupport::TestCase
provider_call_id: function_request.call_id,
name: function_request.function_name,
arguments: function_request.function_args,
result: PredictableToolFunction.expected_test_result
result: { amount: 10000, currency: "USD" }
}
],
previous_response_id: first_response.id,
@ -142,42 +164,7 @@ class Provider::OpenaiTest < ActiveSupport::TestCase
assert text_chunks.size >= 1
assert_equal 1, response_chunks.size
assert_includes response_chunks.first.data.messages.first.output_text, PredictableToolFunction.expected_test_result
assert_includes response_chunks.first.data.messages.first.output_text, "$10,000"
end
end
private
# Test double: a tool function with a fixed, predictable result so cassette
# assertions can match exact output. (Removed by this commit in favor of
# inline function hashes in the tests.)
class PredictableToolFunction
class << self
def expected_test_result
"$124,200"
end
# Overrides Class#name so to_h below reports the tool name, not the
# Ruby class name.
def name
"get_net_worth"
end
def description
"Gets user net worth data"
end
end
# Ignores params and always returns the canned result.
def call(params = {})
self.class.expected_test_result
end
# Serializes this double into the function-definition hash shape the
# provider expects (:name, :description, :params_schema, :strict).
def to_h
{
name: self.class.name,
description: self.class.description,
params_schema: {
type: "object",
properties: {},
required: [],
additionalProperties: false
},
strict: true
}
end
end
end

View file

@ -8,7 +8,7 @@ http_interactions:
string: '{"model":"gpt-4o","input":[{"role":"user","content":"What is my net
worth?"}],"instructions":"Use the tools available to you to answer the user''s
question.","tools":[{"type":"function","name":"get_net_worth","description":"Gets
user net worth data","parameters":{"type":"object","properties":{},"required":[],"additionalProperties":false},"strict":true}],"previous_response_id":null,"stream":null}'
a user''s net worth","parameters":{"type":"object","properties":{},"required":[],"additionalProperties":false},"strict":true}],"previous_response_id":null,"stream":null}'
headers:
Content-Type:
- application/json
@ -26,7 +26,7 @@ http_interactions:
message: OK
headers:
Date:
- Fri, 28 Mar 2025 23:39:03 GMT
- Sat, 29 Mar 2025 14:18:06 GMT
Content-Type:
- application/json
Transfer-Encoding:
@ -38,34 +38,34 @@ http_interactions:
Openai-Organization:
- "<OPENAI_ORGANIZATION_ID>"
X-Request-Id:
- req_acb002a07726b73124003bc11961db12
- req_c49d8a1689abc268c93ee8f78c7acb87
Openai-Processing-Ms:
- '692'
- '1309'
Strict-Transport-Security:
- max-age=31536000; includeSubDomains; preload
Cf-Cache-Status:
- DYNAMIC
Set-Cookie:
- __cf_bm=yktQTGvoMAowiz7E3sLQgydW3Cy6tmHY_KGgLVGm.iE-1743205143-1.0.1.1-XGhvweZhPNRuqFMRcaIvMOyVb.Z1VOf4AtZmc4gPTArM1sU4T3mya2c0QwixjuCSJYuw2BwmWIKnZrDe_xdx6OtV6HQfwB1xASqg79FI6Bw;
path=/; expires=Sat, 29-Mar-25 00:09:03 GMT; domain=.api.openai.com; HttpOnly;
- __cf_bm=GUFPKHTRTDYiA2pC84rAQH0hVR0FXzsY9LPaFgfMSYE-1743257886-1.0.1.1-2mSXiHXVrPGtsLug0Jnr7hwEZxmY1IKBWLWBoqUfvRseZL.xnPspGn1TGDTtb7bfuld8wj3sYGepLCG4GjmLKpMDisdYf9Na8tQZbjLk8_w;
path=/; expires=Sat, 29-Mar-25 14:48:06 GMT; domain=.api.openai.com; HttpOnly;
Secure; SameSite=None
- _cfuvid=I23nR.I1jT11eBu6591kDu2RxoXpHGpOn27Syvx7Zr8-1743205143976-0.0.1.1-604800000;
- _cfuvid=.VkJixs9hPHWdUSlP_wV3mVUAiuYnwGyL2TSr7b54dc-1743257886530-0.0.1.1-604800000;
path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
X-Content-Type-Options:
- nosniff
Server:
- cloudflare
Cf-Ray:
- 927af6f0ef5ccf7c-CMH
- 927ffe95fdd51098-ORD
Alt-Svc:
- h3=":443"; ma=86400
body:
encoding: ASCII-8BIT
string: |-
{
"id": "resp_67e733174818819294a51c6bc4e0814c08d69b77f6076b69",
"id": "resp_67e8011d34b48192bc01d309f7d760440cb01d52dee3be7d",
"object": "response",
"created_at": 1743205143,
"created_at": 1743257885,
"status": "completed",
"error": null,
"incomplete_details": null,
@ -75,8 +75,8 @@ http_interactions:
"output": [
{
"type": "function_call",
"id": "fc_67e73317d104819287a21542e2484a4708d69b77f6076b69",
"call_id": "call_IoG6NAK8MNIoY6x9bzfHnpp6",
"id": "fc_67e8011dcddc8192a0cf2a4959a6de630cb01d52dee3be7d",
"call_id": "call_yi5avefdxpT7aJfgkyEizzEV",
"name": "get_net_worth",
"arguments": "{}",
"status": "completed"
@ -99,7 +99,7 @@ http_interactions:
"tools": [
{
"type": "function",
"description": "Gets user net worth data",
"description": "Gets a user's net worth",
"name": "get_net_worth",
"parameters": {
"type": "object",
@ -126,14 +126,14 @@ http_interactions:
"user": null,
"metadata": {}
}
recorded_at: Fri, 28 Mar 2025 23:39:03 GMT
recorded_at: Sat, 29 Mar 2025 14:18:06 GMT
- request:
method: post
uri: https://api.openai.com/v1/responses
body:
encoding: UTF-8
string: '{"model":"gpt-4o","input":[{"role":"user","content":"What is my net
worth?"},{"type":"function_call_output","call_id":"call_IoG6NAK8MNIoY6x9bzfHnpp6","output":"\"$124,200\""}],"instructions":null,"tools":[],"previous_response_id":"resp_67e733174818819294a51c6bc4e0814c08d69b77f6076b69","stream":null}'
worth?"},{"type":"function_call_output","call_id":"call_yi5avefdxpT7aJfgkyEizzEV","output":"{\"amount\":10000,\"currency\":\"USD\"}"}],"instructions":null,"tools":[],"previous_response_id":"resp_67e8011d34b48192bc01d309f7d760440cb01d52dee3be7d","stream":null}'
headers:
Content-Type:
- application/json
@ -151,7 +151,7 @@ http_interactions:
message: OK
headers:
Date:
- Fri, 28 Mar 2025 23:39:04 GMT
- Sat, 29 Mar 2025 14:18:07 GMT
Content-Type:
- application/json
Transfer-Encoding:
@ -163,34 +163,34 @@ http_interactions:
Openai-Organization:
- "<OPENAI_ORGANIZATION_ID>"
X-Request-Id:
- req_107c55eac5d356ea6a092e179644be74
- req_0317fcff946ac69acb4d49c06d1926ab
Openai-Processing-Ms:
- '666'
- '715'
Strict-Transport-Security:
- max-age=31536000; includeSubDomains; preload
Cf-Cache-Status:
- DYNAMIC
Set-Cookie:
- __cf_bm=l1y3jPeCKUdL4YW9VR8SZAYbPJygY.O5iv8GEmHYyo0-1743205144-1.0.1.1-tuO_26bpHCwjp40gloE4Pu.Pi6wN7K9yV5rFTNDF.PhDjBb8N6TW5POC9qEhbSWWLzNV3nYn.U0E8ft3iXTOMX7i3mXGJOVb1t7TK6T5rVo;
path=/; expires=Sat, 29-Mar-25 00:09:04 GMT; domain=.api.openai.com; HttpOnly;
- __cf_bm=47vokVZwKVHuHroz8brhe5plUsMFk5vuHGB4gdVOlMs-1743257887-1.0.1.1-WYhtm8897cu8DUscVo1A17mKMLMeMnRKWsvqhw3xIbH0xKhHsFJcfR7ohbUd0JOQFXAAGUmLqUVQObLdN7HbBKehRDl8n51QAV35gEHb3.w;
path=/; expires=Sat, 29-Mar-25 14:48:07 GMT; domain=.api.openai.com; HttpOnly;
Secure; SameSite=None
- _cfuvid=Vv42lFdIhJxtY593j4tNCHE5WmPd8d3kwT3Fe.CRYwU-1743205144880-0.0.1.1-604800000;
- _cfuvid=p3HVlO1RXOqc3dLedrcBXx0H_7Tub4nZPqAOPciNPP4-1743257887594-0.0.1.1-604800000;
path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
X-Content-Type-Options:
- nosniff
Server:
- cloudflare
Cf-Ray:
- 927af6f6affe114d-ORD
- 927ffea078bcfa21-ORD
Alt-Svc:
- h3=":443"; ma=86400
body:
encoding: ASCII-8BIT
string: |-
{
"id": "resp_67e733182f5c8192917aa9ffaf3f26e508d69b77f6076b69",
"id": "resp_67e8011edd2881928f71c1a43e0a738d0cb01d52dee3be7d",
"object": "response",
"created_at": 1743205144,
"created_at": 1743257886,
"status": "completed",
"error": null,
"incomplete_details": null,
@ -200,20 +200,20 @@ http_interactions:
"output": [
{
"type": "message",
"id": "msg_67e73318a7448192b0cc40fd88d9881408d69b77f6076b69",
"id": "msg_67e8011f59b08192bca7846a4988f4140cb01d52dee3be7d",
"status": "completed",
"role": "assistant",
"content": [
{
"type": "output_text",
"text": "Your net worth is $124,200.",
"text": "Your net worth is $10,000 USD.",
"annotations": []
}
]
}
],
"parallel_tool_calls": true,
"previous_response_id": "resp_67e733174818819294a51c6bc4e0814c08d69b77f6076b69",
"previous_response_id": "resp_67e8011d34b48192bc01d309f7d760440cb01d52dee3be7d",
"reasoning": {
"effort": null,
"generate_summary": null
@ -230,18 +230,18 @@ http_interactions:
"top_p": 1.0,
"truncation": "disabled",
"usage": {
"input_tokens": 51,
"input_tokens": 56,
"input_tokens_details": {
"cached_tokens": 0
},
"output_tokens": 10,
"output_tokens": 11,
"output_tokens_details": {
"reasoning_tokens": 0
},
"total_tokens": 61
"total_tokens": 67
},
"user": null,
"metadata": {}
}
recorded_at: Fri, 28 Mar 2025 23:39:04 GMT
recorded_at: Sat, 29 Mar 2025 14:18:07 GMT
recorded_with: VCR 6.3.1

View file

@ -8,7 +8,7 @@ http_interactions:
string: '{"model":"gpt-4o","input":[{"role":"user","content":"What is my net
worth?"}],"instructions":"Use the tools available to you to answer the user''s
question.","tools":[{"type":"function","name":"get_net_worth","description":"Gets
user net worth data","parameters":{"type":"object","properties":{},"required":[],"additionalProperties":false},"strict":true}],"previous_response_id":null,"stream":true}'
a user''s net worth","parameters":{"type":"object","properties":{},"required":[],"additionalProperties":false},"strict":true}],"previous_response_id":null,"stream":true}'
headers:
Content-Type:
- application/json
@ -26,7 +26,7 @@ http_interactions:
message: OK
headers:
Date:
- Fri, 28 Mar 2025 23:39:20 GMT
- Sat, 29 Mar 2025 14:14:35 GMT
Content-Type:
- text/event-stream; charset=utf-8
Transfer-Encoding:
@ -38,59 +38,59 @@ http_interactions:
Openai-Organization:
- "<OPENAI_ORGANIZATION_ID>"
X-Request-Id:
- req_416a3bb3593376c1829980cc11557c9e
- req_078175dfa5970ebbad31e3f0a03225f4
Openai-Processing-Ms:
- '171'
- '132'
Strict-Transport-Security:
- max-age=31536000; includeSubDomains; preload
Cf-Cache-Status:
- DYNAMIC
Set-Cookie:
- __cf_bm=2mv3jCpb0SZRXFvlWu28X1mr3Oa6z9QgwtmcclI9J_g-1743205160-1.0.1.1-1P22aYvtQqY_byLJMPyJEYff2m.rYivVXc313sCETck2g4NIg66SHPLTkjOiyqHBYMyifOJ_EVXNFK3QeEDYlPbvB9OzNr_Vo.S.UqPYpJI;
path=/; expires=Sat, 29-Mar-25 00:09:20 GMT; domain=.api.openai.com; HttpOnly;
- __cf_bm=7ZR1M3zf1IkuQzhn1VGwS5c5yN70StMK_aNHwMVsYZc-1743257675-1.0.1.1-lIQuRCTXQ5PZL2CpAb5GMM0AJWJQSf6IgFtfNdmqTMBjJz5q5gQVGSti0OByFsepuMBASwYGr4QLznmsJvxwqKoj_JdzATNamQQee0K7mSo;
path=/; expires=Sat, 29-Mar-25 14:44:35 GMT; domain=.api.openai.com; HttpOnly;
Secure; SameSite=None
- _cfuvid=KTcsmNHnsQWuGiwS1hyyOm7vtycPv3d6yh6pOLElMAg-1743205160250-0.0.1.1-604800000;
- _cfuvid=100NqQJKUFHP6Y2.DdSEuHiTiKk7Hv5zYcghEa02geE-1743257675522-0.0.1.1-604800000;
path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
X-Content-Type-Options:
- nosniff
Server:
- cloudflare
Cf-Ray:
- 927af7582c5ffa19-ORD
- 927ff97698b3e990-ORD
Alt-Svc:
- h3=":443"; ma=86400
body:
encoding: UTF-8
string: |+
event: response.created
data: {"type":"response.created","response":{"id":"resp_67e73328128881928023b1fa925c2bcc0c494b972720dd48","object":"response","created_at":1743205160,"status":"in_progress","error":null,"incomplete_details":null,"instructions":"Use the tools available to you to answer the user's question.","max_output_tokens":null,"model":"gpt-4o-2024-08-06","output":[],"parallel_tool_calls":true,"previous_response_id":null,"reasoning":{"effort":null,"generate_summary":null},"store":true,"temperature":1.0,"text":{"format":{"type":"text"}},"tool_choice":"auto","tools":[{"type":"function","description":"Gets user net worth data","name":"get_net_worth","parameters":{"type":"object","properties":{},"required":[],"additionalProperties":false},"strict":true}],"top_p":1.0,"truncation":"disabled","usage":null,"user":null,"metadata":{}}}
data: {"type":"response.created","response":{"id":"resp_67e8004b64408192b4892e3a9d4cb93a09d69362ec0c8fc7","object":"response","created_at":1743257675,"status":"in_progress","error":null,"incomplete_details":null,"instructions":"Use the tools available to you to answer the user's question.","max_output_tokens":null,"model":"gpt-4o-2024-08-06","output":[],"parallel_tool_calls":true,"previous_response_id":null,"reasoning":{"effort":null,"generate_summary":null},"store":true,"temperature":1.0,"text":{"format":{"type":"text"}},"tool_choice":"auto","tools":[{"type":"function","description":"Gets a user's net worth","name":"get_net_worth","parameters":{"type":"object","properties":{},"required":[],"additionalProperties":false},"strict":true}],"top_p":1.0,"truncation":"disabled","usage":null,"user":null,"metadata":{}}}
event: response.in_progress
data: {"type":"response.in_progress","response":{"id":"resp_67e73328128881928023b1fa925c2bcc0c494b972720dd48","object":"response","created_at":1743205160,"status":"in_progress","error":null,"incomplete_details":null,"instructions":"Use the tools available to you to answer the user's question.","max_output_tokens":null,"model":"gpt-4o-2024-08-06","output":[],"parallel_tool_calls":true,"previous_response_id":null,"reasoning":{"effort":null,"generate_summary":null},"store":true,"temperature":1.0,"text":{"format":{"type":"text"}},"tool_choice":"auto","tools":[{"type":"function","description":"Gets user net worth data","name":"get_net_worth","parameters":{"type":"object","properties":{},"required":[],"additionalProperties":false},"strict":true}],"top_p":1.0,"truncation":"disabled","usage":null,"user":null,"metadata":{}}}
data: {"type":"response.in_progress","response":{"id":"resp_67e8004b64408192b4892e3a9d4cb93a09d69362ec0c8fc7","object":"response","created_at":1743257675,"status":"in_progress","error":null,"incomplete_details":null,"instructions":"Use the tools available to you to answer the user's question.","max_output_tokens":null,"model":"gpt-4o-2024-08-06","output":[],"parallel_tool_calls":true,"previous_response_id":null,"reasoning":{"effort":null,"generate_summary":null},"store":true,"temperature":1.0,"text":{"format":{"type":"text"}},"tool_choice":"auto","tools":[{"type":"function","description":"Gets a user's net worth","name":"get_net_worth","parameters":{"type":"object","properties":{},"required":[],"additionalProperties":false},"strict":true}],"top_p":1.0,"truncation":"disabled","usage":null,"user":null,"metadata":{}}}
event: response.output_item.added
data: {"type":"response.output_item.added","output_index":0,"item":{"type":"function_call","id":"fc_67e73329fa348192b5b5dafb6c2f018e0c494b972720dd48","call_id":"call_vZ9Poyn4ZFtvYwQOeTGFAUSU","name":"get_net_worth","arguments":"","status":"in_progress"}}
data: {"type":"response.output_item.added","output_index":0,"item":{"type":"function_call","id":"fc_67e8004bbf6c819282aa8180b46884cc09d69362ec0c8fc7","call_id":"call_y6qg0v57I4dMxQp5AVyPquqI","name":"get_net_worth","arguments":"","status":"in_progress"}}
event: response.function_call_arguments.delta
data: {"type":"response.function_call_arguments.delta","item_id":"fc_67e73329fa348192b5b5dafb6c2f018e0c494b972720dd48","output_index":0,"delta":"{}"}
data: {"type":"response.function_call_arguments.delta","item_id":"fc_67e8004bbf6c819282aa8180b46884cc09d69362ec0c8fc7","output_index":0,"delta":"{}"}
event: response.function_call_arguments.done
data: {"type":"response.function_call_arguments.done","item_id":"fc_67e73329fa348192b5b5dafb6c2f018e0c494b972720dd48","output_index":0,"arguments":"{}"}
data: {"type":"response.function_call_arguments.done","item_id":"fc_67e8004bbf6c819282aa8180b46884cc09d69362ec0c8fc7","output_index":0,"arguments":"{}"}
event: response.output_item.done
data: {"type":"response.output_item.done","output_index":0,"item":{"type":"function_call","id":"fc_67e73329fa348192b5b5dafb6c2f018e0c494b972720dd48","call_id":"call_vZ9Poyn4ZFtvYwQOeTGFAUSU","name":"get_net_worth","arguments":"{}","status":"completed"}}
data: {"type":"response.output_item.done","output_index":0,"item":{"type":"function_call","id":"fc_67e8004bbf6c819282aa8180b46884cc09d69362ec0c8fc7","call_id":"call_y6qg0v57I4dMxQp5AVyPquqI","name":"get_net_worth","arguments":"{}","status":"completed"}}
event: response.completed
data: {"type":"response.completed","response":{"id":"resp_67e73328128881928023b1fa925c2bcc0c494b972720dd48","object":"response","created_at":1743205160,"status":"completed","error":null,"incomplete_details":null,"instructions":"Use the tools available to you to answer the user's question.","max_output_tokens":null,"model":"gpt-4o-2024-08-06","output":[{"type":"function_call","id":"fc_67e73329fa348192b5b5dafb6c2f018e0c494b972720dd48","call_id":"call_vZ9Poyn4ZFtvYwQOeTGFAUSU","name":"get_net_worth","arguments":"{}","status":"completed"}],"parallel_tool_calls":true,"previous_response_id":null,"reasoning":{"effort":null,"generate_summary":null},"store":true,"temperature":1.0,"text":{"format":{"type":"text"}},"tool_choice":"auto","tools":[{"type":"function","description":"Gets user net worth data","name":"get_net_worth","parameters":{"type":"object","properties":{},"required":[],"additionalProperties":false},"strict":true}],"top_p":1.0,"truncation":"disabled","usage":{"input_tokens":55,"input_tokens_details":{"cached_tokens":0},"output_tokens":13,"output_tokens_details":{"reasoning_tokens":0},"total_tokens":68},"user":null,"metadata":{}}}
data: {"type":"response.completed","response":{"id":"resp_67e8004b64408192b4892e3a9d4cb93a09d69362ec0c8fc7","object":"response","created_at":1743257675,"status":"completed","error":null,"incomplete_details":null,"instructions":"Use the tools available to you to answer the user's question.","max_output_tokens":null,"model":"gpt-4o-2024-08-06","output":[{"type":"function_call","id":"fc_67e8004bbf6c819282aa8180b46884cc09d69362ec0c8fc7","call_id":"call_y6qg0v57I4dMxQp5AVyPquqI","name":"get_net_worth","arguments":"{}","status":"completed"}],"parallel_tool_calls":true,"previous_response_id":null,"reasoning":{"effort":null,"generate_summary":null},"store":true,"temperature":1.0,"text":{"format":{"type":"text"}},"tool_choice":"auto","tools":[{"type":"function","description":"Gets a user's net worth","name":"get_net_worth","parameters":{"type":"object","properties":{},"required":[],"additionalProperties":false},"strict":true}],"top_p":1.0,"truncation":"disabled","usage":{"input_tokens":55,"input_tokens_details":{"cached_tokens":0},"output_tokens":13,"output_tokens_details":{"reasoning_tokens":0},"total_tokens":68},"user":null,"metadata":{}}}
recorded_at: Fri, 28 Mar 2025 23:39:22 GMT
recorded_at: Sat, 29 Mar 2025 14:14:35 GMT
- request:
method: post
uri: https://api.openai.com/v1/responses
body:
encoding: UTF-8
string: '{"model":"gpt-4o","input":[{"role":"user","content":"What is my net
worth?"},{"type":"function_call_output","call_id":"call_vZ9Poyn4ZFtvYwQOeTGFAUSU","output":"\"$124,200\""}],"instructions":null,"tools":[],"previous_response_id":"resp_67e73328128881928023b1fa925c2bcc0c494b972720dd48","stream":true}'
worth?"},{"type":"function_call_output","call_id":"call_y6qg0v57I4dMxQp5AVyPquqI","output":"{\"amount\":10000,\"currency\":\"USD\"}"}],"instructions":null,"tools":[],"previous_response_id":"resp_67e8004b64408192b4892e3a9d4cb93a09d69362ec0c8fc7","stream":true}'
headers:
Content-Type:
- application/json
@ -108,7 +108,7 @@ http_interactions:
message: OK
headers:
Date:
- Fri, 28 Mar 2025 23:39:22 GMT
- Sat, 29 Mar 2025 14:14:36 GMT
Content-Type:
- text/event-stream; charset=utf-8
Transfer-Encoding:
@ -120,81 +120,84 @@ http_interactions:
Openai-Organization:
- "<OPENAI_ORGANIZATION_ID>"
X-Request-Id:
- req_96db0e490c091fd28d3849cb2b7494dd
- req_929f465c1816998022c04d13826bbcab
Openai-Processing-Ms:
- '127'
- '196'
Strict-Transport-Security:
- max-age=31536000; includeSubDomains; preload
Cf-Cache-Status:
- DYNAMIC
Set-Cookie:
- __cf_bm=uCEzdEuc0NlONCs9_aE4Zlk_.gTSlFJ8f1Wih1A8C.4-1743205162-1.0.1.1-3O16QsxbyafPXR1z4aXLAf.s9MlH8j1NF_DLrC.5MvWO3j6c7X2Xao1ro6jiM8QSE62d5TnzfnWaqz.HPmtpaBOdD4l3r3Uf_7vKltr27T4;
path=/; expires=Sat, 29-Mar-25 00:09:22 GMT; domain=.api.openai.com; HttpOnly;
- __cf_bm=zbXv2ZQZ49E0aXVr8sIy27x6sILi6AWvXwl6Lasv3Kg-1743257676-1.0.1.1-2GUFgWaard_ZCFxXLwryeGTVWiG8B29rBKzJi9Lh4oIG3UQKSwmxxM81s4q02Lma.9CUTLXEAfuo6UCoXw7NVYJxbSbjSCCcSlq5P4mp0qU;
path=/; expires=Sat, 29-Mar-25 14:44:36 GMT; domain=.api.openai.com; HttpOnly;
Secure; SameSite=None
- _cfuvid=Daug1DTioODhGs.oCPbiMGRU8rN4cooGraWTg30zUH4-1743205162681-0.0.1.1-604800000;
- _cfuvid=el6i6ZNbUFa0SrSM5FnUlUjqAoHx40mHpDhQM_FaNoE-1743257676357-0.0.1.1-604800000;
path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
X-Content-Type-Options:
- nosniff
Server:
- cloudflare
Cf-Ray:
- 927af7696faa10ee-ORD
- 927ff97b8d6e109d-ORD
Alt-Svc:
- h3=":443"; ma=86400
body:
encoding: UTF-8
string: |+
event: response.created
data: {"type":"response.created","response":{"id":"resp_67e7332a89f48192a7c5da3f21bd48940c494b972720dd48","object":"response","created_at":1743205162,"status":"in_progress","error":null,"incomplete_details":null,"instructions":null,"max_output_tokens":null,"model":"gpt-4o-2024-08-06","output":[],"parallel_tool_calls":true,"previous_response_id":"resp_67e73328128881928023b1fa925c2bcc0c494b972720dd48","reasoning":{"effort":null,"generate_summary":null},"store":true,"temperature":1.0,"text":{"format":{"type":"text"}},"tool_choice":"auto","tools":[],"top_p":1.0,"truncation":"disabled","usage":null,"user":null,"metadata":{}}}
data: {"type":"response.created","response":{"id":"resp_67e8004c2938819292d684636e261ee509d69362ec0c8fc7","object":"response","created_at":1743257676,"status":"in_progress","error":null,"incomplete_details":null,"instructions":null,"max_output_tokens":null,"model":"gpt-4o-2024-08-06","output":[],"parallel_tool_calls":true,"previous_response_id":"resp_67e8004b64408192b4892e3a9d4cb93a09d69362ec0c8fc7","reasoning":{"effort":null,"generate_summary":null},"store":true,"temperature":1.0,"text":{"format":{"type":"text"}},"tool_choice":"auto","tools":[],"top_p":1.0,"truncation":"disabled","usage":null,"user":null,"metadata":{}}}
event: response.in_progress
data: {"type":"response.in_progress","response":{"id":"resp_67e7332a89f48192a7c5da3f21bd48940c494b972720dd48","object":"response","created_at":1743205162,"status":"in_progress","error":null,"incomplete_details":null,"instructions":null,"max_output_tokens":null,"model":"gpt-4o-2024-08-06","output":[],"parallel_tool_calls":true,"previous_response_id":"resp_67e73328128881928023b1fa925c2bcc0c494b972720dd48","reasoning":{"effort":null,"generate_summary":null},"store":true,"temperature":1.0,"text":{"format":{"type":"text"}},"tool_choice":"auto","tools":[],"top_p":1.0,"truncation":"disabled","usage":null,"user":null,"metadata":{}}}
data: {"type":"response.in_progress","response":{"id":"resp_67e8004c2938819292d684636e261ee509d69362ec0c8fc7","object":"response","created_at":1743257676,"status":"in_progress","error":null,"incomplete_details":null,"instructions":null,"max_output_tokens":null,"model":"gpt-4o-2024-08-06","output":[],"parallel_tool_calls":true,"previous_response_id":"resp_67e8004b64408192b4892e3a9d4cb93a09d69362ec0c8fc7","reasoning":{"effort":null,"generate_summary":null},"store":true,"temperature":1.0,"text":{"format":{"type":"text"}},"tool_choice":"auto","tools":[],"top_p":1.0,"truncation":"disabled","usage":null,"user":null,"metadata":{}}}
event: response.output_item.added
data: {"type":"response.output_item.added","output_index":0,"item":{"type":"message","id":"msg_67e7332af2a0819291cf0cebaf49ad960c494b972720dd48","status":"in_progress","role":"assistant","content":[]}}
data: {"type":"response.output_item.added","output_index":0,"item":{"type":"message","id":"msg_67e8004cc2908192a1d238cb93e95b5309d69362ec0c8fc7","status":"in_progress","role":"assistant","content":[]}}
event: response.content_part.added
data: {"type":"response.content_part.added","item_id":"msg_67e7332af2a0819291cf0cebaf49ad960c494b972720dd48","output_index":0,"content_index":0,"part":{"type":"output_text","text":"","annotations":[]}}
data: {"type":"response.content_part.added","item_id":"msg_67e8004cc2908192a1d238cb93e95b5309d69362ec0c8fc7","output_index":0,"content_index":0,"part":{"type":"output_text","text":"","annotations":[]}}
event: response.output_text.delta
data: {"type":"response.output_text.delta","item_id":"msg_67e7332af2a0819291cf0cebaf49ad960c494b972720dd48","output_index":0,"content_index":0,"delta":"Your"}
data: {"type":"response.output_text.delta","item_id":"msg_67e8004cc2908192a1d238cb93e95b5309d69362ec0c8fc7","output_index":0,"content_index":0,"delta":"Your"}
event: response.output_text.delta
data: {"type":"response.output_text.delta","item_id":"msg_67e7332af2a0819291cf0cebaf49ad960c494b972720dd48","output_index":0,"content_index":0,"delta":" net"}
data: {"type":"response.output_text.delta","item_id":"msg_67e8004cc2908192a1d238cb93e95b5309d69362ec0c8fc7","output_index":0,"content_index":0,"delta":" net"}
event: response.output_text.delta
data: {"type":"response.output_text.delta","item_id":"msg_67e7332af2a0819291cf0cebaf49ad960c494b972720dd48","output_index":0,"content_index":0,"delta":" worth"}
data: {"type":"response.output_text.delta","item_id":"msg_67e8004cc2908192a1d238cb93e95b5309d69362ec0c8fc7","output_index":0,"content_index":0,"delta":" worth"}
event: response.output_text.delta
data: {"type":"response.output_text.delta","item_id":"msg_67e7332af2a0819291cf0cebaf49ad960c494b972720dd48","output_index":0,"content_index":0,"delta":" is"}
data: {"type":"response.output_text.delta","item_id":"msg_67e8004cc2908192a1d238cb93e95b5309d69362ec0c8fc7","output_index":0,"content_index":0,"delta":" is"}
event: response.output_text.delta
data: {"type":"response.output_text.delta","item_id":"msg_67e7332af2a0819291cf0cebaf49ad960c494b972720dd48","output_index":0,"content_index":0,"delta":" $"}
data: {"type":"response.output_text.delta","item_id":"msg_67e8004cc2908192a1d238cb93e95b5309d69362ec0c8fc7","output_index":0,"content_index":0,"delta":" $"}
event: response.output_text.delta
data: {"type":"response.output_text.delta","item_id":"msg_67e7332af2a0819291cf0cebaf49ad960c494b972720dd48","output_index":0,"content_index":0,"delta":"124"}
data: {"type":"response.output_text.delta","item_id":"msg_67e8004cc2908192a1d238cb93e95b5309d69362ec0c8fc7","output_index":0,"content_index":0,"delta":"10"}
event: response.output_text.delta
data: {"type":"response.output_text.delta","item_id":"msg_67e7332af2a0819291cf0cebaf49ad960c494b972720dd48","output_index":0,"content_index":0,"delta":","}
data: {"type":"response.output_text.delta","item_id":"msg_67e8004cc2908192a1d238cb93e95b5309d69362ec0c8fc7","output_index":0,"content_index":0,"delta":","}
event: response.output_text.delta
data: {"type":"response.output_text.delta","item_id":"msg_67e7332af2a0819291cf0cebaf49ad960c494b972720dd48","output_index":0,"content_index":0,"delta":"200"}
data: {"type":"response.output_text.delta","item_id":"msg_67e8004cc2908192a1d238cb93e95b5309d69362ec0c8fc7","output_index":0,"content_index":0,"delta":"000"}
event: response.output_text.delta
data: {"type":"response.output_text.delta","item_id":"msg_67e7332af2a0819291cf0cebaf49ad960c494b972720dd48","output_index":0,"content_index":0,"delta":"."}
data: {"type":"response.output_text.delta","item_id":"msg_67e8004cc2908192a1d238cb93e95b5309d69362ec0c8fc7","output_index":0,"content_index":0,"delta":" USD"}
event: response.output_text.delta
data: {"type":"response.output_text.delta","item_id":"msg_67e8004cc2908192a1d238cb93e95b5309d69362ec0c8fc7","output_index":0,"content_index":0,"delta":"."}
event: response.output_text.done
data: {"type":"response.output_text.done","item_id":"msg_67e7332af2a0819291cf0cebaf49ad960c494b972720dd48","output_index":0,"content_index":0,"text":"Your net worth is $124,200."}
data: {"type":"response.output_text.done","item_id":"msg_67e8004cc2908192a1d238cb93e95b5309d69362ec0c8fc7","output_index":0,"content_index":0,"text":"Your net worth is $10,000 USD."}
event: response.content_part.done
data: {"type":"response.content_part.done","item_id":"msg_67e7332af2a0819291cf0cebaf49ad960c494b972720dd48","output_index":0,"content_index":0,"part":{"type":"output_text","text":"Your net worth is $124,200.","annotations":[]}}
data: {"type":"response.content_part.done","item_id":"msg_67e8004cc2908192a1d238cb93e95b5309d69362ec0c8fc7","output_index":0,"content_index":0,"part":{"type":"output_text","text":"Your net worth is $10,000 USD.","annotations":[]}}
event: response.output_item.done
data: {"type":"response.output_item.done","output_index":0,"item":{"type":"message","id":"msg_67e7332af2a0819291cf0cebaf49ad960c494b972720dd48","status":"completed","role":"assistant","content":[{"type":"output_text","text":"Your net worth is $124,200.","annotations":[]}]}}
data: {"type":"response.output_item.done","output_index":0,"item":{"type":"message","id":"msg_67e8004cc2908192a1d238cb93e95b5309d69362ec0c8fc7","status":"completed","role":"assistant","content":[{"type":"output_text","text":"Your net worth is $10,000 USD.","annotations":[]}]}}
event: response.completed
data: {"type":"response.completed","response":{"id":"resp_67e7332a89f48192a7c5da3f21bd48940c494b972720dd48","object":"response","created_at":1743205162,"status":"completed","error":null,"incomplete_details":null,"instructions":null,"max_output_tokens":null,"model":"gpt-4o-2024-08-06","output":[{"type":"message","id":"msg_67e7332af2a0819291cf0cebaf49ad960c494b972720dd48","status":"completed","role":"assistant","content":[{"type":"output_text","text":"Your net worth is $124,200.","annotations":[]}]}],"parallel_tool_calls":true,"previous_response_id":"resp_67e73328128881928023b1fa925c2bcc0c494b972720dd48","reasoning":{"effort":null,"generate_summary":null},"store":true,"temperature":1.0,"text":{"format":{"type":"text"}},"tool_choice":"auto","tools":[],"top_p":1.0,"truncation":"disabled","usage":{"input_tokens":51,"input_tokens_details":{"cached_tokens":0},"output_tokens":10,"output_tokens_details":{"reasoning_tokens":0},"total_tokens":61},"user":null,"metadata":{}}}
data: {"type":"response.completed","response":{"id":"resp_67e8004c2938819292d684636e261ee509d69362ec0c8fc7","object":"response","created_at":1743257676,"status":"completed","error":null,"incomplete_details":null,"instructions":null,"max_output_tokens":null,"model":"gpt-4o-2024-08-06","output":[{"type":"message","id":"msg_67e8004cc2908192a1d238cb93e95b5309d69362ec0c8fc7","status":"completed","role":"assistant","content":[{"type":"output_text","text":"Your net worth is $10,000 USD.","annotations":[]}]}],"parallel_tool_calls":true,"previous_response_id":"resp_67e8004b64408192b4892e3a9d4cb93a09d69362ec0c8fc7","reasoning":{"effort":null,"generate_summary":null},"store":true,"temperature":1.0,"text":{"format":{"type":"text"}},"tool_choice":"auto","tools":[],"top_p":1.0,"truncation":"disabled","usage":{"input_tokens":56,"input_tokens_details":{"cached_tokens":0},"output_tokens":11,"output_tokens_details":{"reasoning_tokens":0},"total_tokens":67},"user":null,"metadata":{}}}
recorded_at: Fri, 28 Mar 2025 23:39:23 GMT
recorded_at: Sat, 29 Mar 2025 14:14:37 GMT
recorded_with: VCR 6.3.1
...