@@ -880,9 +880,9 @@ def test_convert_response_to_chat_result_anthropic_cache_tokens(llm: ChatDatabri
     choice = Choice(index=0, message=message, finish_reason="stop", logprobs=None)
     usage = _create_claude_completion_usage()
     response = ChatCompletion(
-        id="chatcmpl_test",
+        id=_MOCK_CHAT_RESPONSE["id"],
         choices=[choice],
-        created=1721875529,
+        created=_MOCK_CHAT_RESPONSE["created"],
         model="databricks-claude-sonnet-4-5",
         object="chat.completion",
         usage=usage,
@@ -903,9 +903,9 @@ def test_convert_response_to_chat_result_openai_cache_tokens(llm: ChatDatabricks
     choice = Choice(index=0, message=message, finish_reason="stop", logprobs=None)
     usage = _create_openai_completion_usage()
     response = ChatCompletion(
-        id="chatcmpl_test",
+        id=_MOCK_CHAT_RESPONSE["id"],
         choices=[choice],
-        created=1721875529,
+        created=_MOCK_CHAT_RESPONSE["created"],
         model="gpt-4o",
         object="chat.completion",
         usage=usage,
@@ -924,9 +924,9 @@ def test_convert_response_to_chat_result_no_cache_tokens(llm: ChatDatabricks) ->
     choice = Choice(index=0, message=message, finish_reason="stop", logprobs=None)
     usage = CompletionUsage(prompt_tokens=100, completion_tokens=50, total_tokens=150)
    response = ChatCompletion(
-        id="chatcmpl_test",
+        id=_MOCK_CHAT_RESPONSE["id"],
         choices=[choice],
-        created=1721875529,
+        created=_MOCK_CHAT_RESPONSE["created"],
         model="test-model",
         object="chat.completion",
         usage=usage,
0 commit comments