D:\PythonCode\LangChainTest\.venv\Scripts\python.exe D:\PythonCode\LangChainTest\main.py
D:\PythonCode\LangChainTest\.venv\Lib\site-packages\langchain_core\_api\deprecation.py:26: UserWarning: Core Pydantic V1 functionality isn't compatible with Python 3.14 or greater.
  from pydantic.v1.fields import FieldInfo as FieldInfoV1
Traceback (most recent call last):
  File "D:\PythonCode\LangChainTest\main.py", line 14, in <module>
    agent.invoke(
        {"messages": [{"role": "user", "content": "旧金山天气如何?"}]}
    )
  File "D:\PythonCode\LangChainTest\.venv\Lib\site-packages\langgraph\pregel\main.py", line 3068, in invoke
    for chunk in self.stream(
        input,
        ...<10 lines>...
        **kwargs,
    ):
  File "D:\PythonCode\LangChainTest\.venv\Lib\site-packages\langgraph\pregel\main.py", line 2643, in stream
    for _ in runner.tick(
        [t for t in loop.tasks.values() if not t.writes],
        ...<2 lines>...
        schedule_task=loop.accept_push,
    ):
  File "D:\PythonCode\LangChainTest\.venv\Lib\site-packages\langgraph\pregel\_runner.py", line 167, in tick
    run_with_retry(
        t,
        ...<10 lines>...
        },
    )
  File "D:\PythonCode\LangChainTest\.venv\Lib\site-packages\langgraph\pregel\_retry.py", line 42, in run_with_retry
    return task.proc.invoke(task.input, config)
  File "D:\PythonCode\LangChainTest\.venv\Lib\site-packages\langgraph\_internal\_runnable.py", line 656, in invoke
    input = context.run(step.invoke, input, config, **kwargs)
  File "D:\PythonCode\LangChainTest\.venv\Lib\site-packages\langgraph\_internal\_runnable.py", line 400, in invoke
    ret = self.func(*args, **kwargs)
  File "D:\PythonCode\LangChainTest\.venv\Lib\site-packages\langchain\agents\factory.py", line 1131, in model_node
    response = _execute_model_sync(request)
  File "D:\PythonCode\LangChainTest\.venv\Lib\site-packages\langchain\agents\factory.py", line 1102, in _execute_model_sync
    output = model_.invoke(messages)
  File "D:\PythonCode\LangChainTest\.venv\Lib\site-packages\langchain_core\runnables\base.py", line 5548, in invoke
    return self.bound.invoke(
        input,
        self._merge_configs(config),
        **{**self.kwargs, **kwargs},
    )
  File "D:\PythonCode\LangChainTest\.venv\Lib\site-packages\langchain_core\language_models\chat_models.py", line 398, in invoke
    self.generate_prompt(
        [self._convert_input(input)],
        ...<6 lines>...
        **kwargs,
    ).generations[0][0],
  File "D:\PythonCode\LangChainTest\.venv\Lib\site-packages\langchain_core\language_models\chat_models.py", line 1117, in generate_prompt
    return self.generate(prompt_messages, stop=stop, callbacks=callbacks, **kwargs)
  File "D:\PythonCode\LangChainTest\.venv\Lib\site-packages\langchain_core\language_models\chat_models.py", line 927, in generate
    self._generate_with_cache(
        m,
        ...<2 lines>...
        **kwargs,
    )
  File "D:\PythonCode\LangChainTest\.venv\Lib\site-packages\langchain_core\language_models\chat_models.py", line 1221, in _generate_with_cache
    result = self._generate(
        messages, stop=stop, run_manager=run_manager, **kwargs
    )
  File "D:\PythonCode\LangChainTest\.venv\Lib\site-packages\langchain_anthropic\chat_models.py", line 1915, in _generate
    data = self._create(payload)
  File "D:\PythonCode\LangChainTest\.venv\Lib\site-packages\langchain_anthropic\chat_models.py", line 1777, in _create
    return self._client.messages.create(**payload)
  File "D:\PythonCode\LangChainTest\.venv\Lib\site-packages\anthropic\_utils\_utils.py", line 282, in wrapper
    return func(*args, **kwargs)
  File "D:\PythonCode\LangChainTest\.venv\Lib\site-packages\anthropic\resources\messages\messages.py", line 930, in create
    return self._post(
        "/v1/messages",
        ...<26 lines>...
        stream_cls=Stream[RawMessageStreamEvent],
    )
  File "D:\PythonCode\LangChainTest\.venv\Lib\site-packages\anthropic\_base_client.py", line 1326, in post
    return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls))
  File "D:\PythonCode\LangChainTest\.venv\Lib\site-packages\anthropic\_base_client.py", line 1035, in request
    request = self._build_request(options, retries_taken=retries_taken)
  File "D:\PythonCode\LangChainTest\.venv\Lib\site-packages\anthropic\_base_client.py", line 506, in _build_request
    headers = self._build_headers(options, retries_taken=retries_taken)
  File "D:\PythonCode\LangChainTest\.venv\Lib\site-packages\anthropic\_base_client.py", line 447, in _build_headers
    self._validate_headers(headers_dict, custom_headers)
  File "D:\PythonCode\LangChainTest\.venv\Lib\site-packages\anthropic\_client.py", line 200, in _validate_headers
    raise TypeError(
        '"Could not resolve authentication method. Expected either api_key or auth_token to be set. Or for one of the `X-Api-Key` or `Authorization` headers to be explicitly omitted"'
    )
TypeError: "Could not resolve authentication method. Expected either api_key or auth_token to be set. Or for one of the `X-Api-Key` or `Authorization` headers to be explicitly omitted"
During task with name 'model' and id '9c34258b-c90f-d21b-f23b-504e5fe2b3f1'

Process finished with exit code 1
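
The initial UserWarning about Pydantic V1 on Python 3.14 is unrelated to the failure. The crash itself comes from the Anthropic client: it could not find any credentials, because neither api_key nor auth_token was provided and the ANTHROPIC_API_KEY environment variable was not visible to the process. Below is a minimal sketch of a working setup. It assumes the agent is built with langchain's create_agent and langchain_anthropic's ChatAnthropic; the model name, the empty tools list, and the key-handling shown here are illustrative, since main.py's actual construction code is not part of the log.

    import os

    from langchain.agents import create_agent          # assumption: how main.py builds the agent is not shown
    from langchain_anthropic import ChatAnthropic

    # Either export the key before launching (ChatAnthropic reads ANTHROPIC_API_KEY by default):
    #   set ANTHROPIC_API_KEY=sk-ant-...                (Windows cmd)
    # or pass it explicitly when constructing the model:
    model = ChatAnthropic(
        model="claude-sonnet-4-5",                      # illustrative model name
        api_key=os.environ["ANTHROPIC_API_KEY"],        # fails fast with KeyError if the variable is missing
    )

    agent = create_agent(model, tools=[])               # tools omitted; main.py presumably binds a weather tool

    # "旧金山天气如何?" = "What's the weather like in San Francisco?"
    result = agent.invoke(
        {"messages": [{"role": "user", "content": "旧金山天气如何?"}]}
    )
    print(result["messages"][-1].content)

Setting the environment variable in the run configuration (or a .env file loaded before the model is created) avoids hard-coding the key in source.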