Callbacks
+ +
+ BaseCallbackHandler
+
+
+¶
+
+
+
+ Bases: LLMManagerMixin, ChainManagerMixin, ToolManagerMixin, RetrieverManagerMixin, CallbackManagerMixin, RunManagerMixin
+ flowchart TD
+ langchain_core.callbacks.base.BaseCallbackHandler[BaseCallbackHandler]
+ langchain_core.callbacks.base.LLMManagerMixin[LLMManagerMixin]
+ langchain_core.callbacks.base.ChainManagerMixin[ChainManagerMixin]
+ langchain_core.callbacks.base.ToolManagerMixin[ToolManagerMixin]
+ langchain_core.callbacks.base.RetrieverManagerMixin[RetrieverManagerMixin]
+ langchain_core.callbacks.base.CallbackManagerMixin[CallbackManagerMixin]
+ langchain_core.callbacks.base.RunManagerMixin[RunManagerMixin]
+
+ langchain_core.callbacks.base.LLMManagerMixin --> langchain_core.callbacks.base.BaseCallbackHandler
+
+ langchain_core.callbacks.base.ChainManagerMixin --> langchain_core.callbacks.base.BaseCallbackHandler
+
+ langchain_core.callbacks.base.ToolManagerMixin --> langchain_core.callbacks.base.BaseCallbackHandler
+
+ langchain_core.callbacks.base.RetrieverManagerMixin --> langchain_core.callbacks.base.BaseCallbackHandler
+
+ langchain_core.callbacks.base.CallbackManagerMixin --> langchain_core.callbacks.base.BaseCallbackHandler
+
+ langchain_core.callbacks.base.RunManagerMixin --> langchain_core.callbacks.base.BaseCallbackHandler
+
+
+
+ click langchain_core.callbacks.base.BaseCallbackHandler href "" "langchain_core.callbacks.base.BaseCallbackHandler"
+ click langchain_core.callbacks.base.LLMManagerMixin href "" "langchain_core.callbacks.base.LLMManagerMixin"
+ click langchain_core.callbacks.base.ChainManagerMixin href "" "langchain_core.callbacks.base.ChainManagerMixin"
+ click langchain_core.callbacks.base.ToolManagerMixin href "" "langchain_core.callbacks.base.ToolManagerMixin"
+ click langchain_core.callbacks.base.RetrieverManagerMixin href "" "langchain_core.callbacks.base.RetrieverManagerMixin"
+ click langchain_core.callbacks.base.CallbackManagerMixin href "" "langchain_core.callbacks.base.CallbackManagerMixin"
+ click langchain_core.callbacks.base.RunManagerMixin href "" "langchain_core.callbacks.base.RunManagerMixin"
+
+
+
+
+ Base callback handler.
+ + + + + + + + + + +| METHOD | +DESCRIPTION | +
|---|---|
on_text |
+
+
+
+ Run on an arbitrary text. + |
+
on_retry |
+
+
+
+ Run on a retry event. + |
+
on_custom_event |
+
+
+
+ Override to define a handler for a custom event. + |
+
on_llm_start |
+
+
+
+ Run when LLM starts running. + |
+
on_chat_model_start |
+
+
+
+ Run when a chat model starts running. + |
+
on_retriever_start |
+
+
+
 + Run when the Retriever starts running. + |
+
on_chain_start |
+
+
+
+ Run when a chain starts running. + |
+
on_tool_start |
+
+
+
+ Run when the tool starts running. + |
+
on_retriever_error |
+
+
+
 + Run when Retriever errors. + |
+
on_retriever_end |
+
+
+
 + Run when Retriever ends running. + |
+
on_tool_end |
+
+
+
+ Run when the tool ends running. + |
+
on_tool_error |
+
+
+
+ Run when tool errors. + |
+
on_chain_end |
+
+
+
+ Run when chain ends running. + |
+
on_chain_error |
+
+
+
+ Run when chain errors. + |
+
on_agent_action |
+
+
+
+ Run on agent action. + |
+
on_agent_finish |
+
+
+
+ Run on the agent end. + |
+
on_llm_new_token |
+
+
+
+ Run on new output token. + |
+
on_llm_end |
+
+
+
+ Run when LLM ends running. + |
+
on_llm_error |
+
+
+
+ Run when LLM errors. + |
+
+ raise_error
+
+
+
+ class-attribute
+ instance-attribute
+
+
+¶
+raise_error: bool = False
+Whether to raise an error if an exception occurs.
+ + +
+ run_inline
+
+
+
+ class-attribute
+ instance-attribute
+
+
+¶
+run_inline: bool = False
+Whether to run the callback inline.
+ + +
+ ignore_llm
+
+
+
+ property
+
+
+¶
+ignore_llm: bool
+Whether to ignore LLM callbacks.
+ + +
+ ignore_retry
+
+
+
+ property
+
+
+¶
+ignore_retry: bool
+Whether to ignore retry callbacks.
+ + +
+ ignore_chain
+
+
+
+ property
+
+
+¶
+ignore_chain: bool
+Whether to ignore chain callbacks.
+ + +
+ ignore_agent
+
+
+
+ property
+
+
+¶
+ignore_agent: bool
+Whether to ignore agent callbacks.
+ + +
+ ignore_retriever
+
+
+
+ property
+
+
+¶
+ignore_retriever: bool
+Whether to ignore retriever callbacks.
+ + +
+ ignore_chat_model
+
+
+
+ property
+
+
+¶
+ignore_chat_model: bool
+Whether to ignore chat model callbacks.
+ + +
+ ignore_custom_event
+
+
+
+ property
+
+
+¶
+ignore_custom_event: bool
+Ignore custom event.
+ + +
+ on_text
+
+
+¶
+
+
+ Run on an arbitrary text.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ text
+ |
+
+
+
+ The text. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_retry
+
+
+¶
+on_retry(
+ retry_state: RetryCallState,
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ **kwargs: Any,
+) -> Any
+Run on a retry event.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ retry_state
+ |
+
+
+
+ The retry state. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_custom_event
+
+
+¶
+on_custom_event(
+ name: str,
+ data: Any,
+ *,
+ run_id: UUID,
+ tags: list[str] | None = None,
+ metadata: dict[str, Any] | None = None,
+ **kwargs: Any,
+) -> Any
+Override to define a handler for a custom event.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ name
+ |
+
+
+
+ The name of the custom event. +
+
+ TYPE:
+ |
+
+ data
+ |
+
+
+
+ The data for the custom event. +Format will match the format specified by the user. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the run. +
+
+ TYPE:
+ |
+
+ tags
+ |
+
+
+
+
+ The tags associated with the custom event (includes inherited tags). + |
+
+ metadata
+ |
+
+
+
+
+ The metadata associated with the custom event (includes inherited +metadata). + |
+
+ on_llm_start
+
+
+¶
+on_llm_start(
+ serialized: dict[str, Any],
+ prompts: list[str],
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ tags: list[str] | None = None,
+ metadata: dict[str, Any] | None = None,
+ **kwargs: Any,
+) -> Any
+Run when LLM starts running.
+Warning
+This method is called for non-chat models (regular text completion LLMs). If
+you're implementing a handler for a chat model, you should use
+on_chat_model_start instead.
| PARAMETER | +DESCRIPTION | +
|---|---|
+ serialized
+ |
+
+
+
+
+ The serialized LLM. + |
+
+ prompts
+ |
+
+
+
+
+ The prompts. + |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ tags
+ |
+
+
+
+
+ The tags. + |
+
+ metadata
+ |
+
+
+
+
+ The metadata. + |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_chat_model_start
+
+
+¶
+on_chat_model_start(
+ serialized: dict[str, Any],
+ messages: list[list[BaseMessage]],
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ tags: list[str] | None = None,
+ metadata: dict[str, Any] | None = None,
+ **kwargs: Any,
+) -> Any
+Run when a chat model starts running.
+Warning
+This method is called for chat models. If you're implementing a handler for
+a non-chat model, you should use on_llm_start instead.
| PARAMETER | +DESCRIPTION | +
|---|---|
+ serialized
+ |
+
+
+
+
+ The serialized chat model. + |
+
+ messages
+ |
+
+
+
+ The messages. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ tags
+ |
+
+
+
+
+ The tags. + |
+
+ metadata
+ |
+
+
+
+
+ The metadata. + |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_retriever_start
+
+
+¶
+on_retriever_start(
+ serialized: dict[str, Any],
+ query: str,
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ tags: list[str] | None = None,
+ metadata: dict[str, Any] | None = None,
+ **kwargs: Any,
+) -> Any
+Run when the Retriever starts running.
| PARAMETER | +DESCRIPTION | +
|---|---|
+ serialized
+ |
+
+
+
+
 + The serialized Retriever. + |
+
+ query
+ |
+
+
+
+ The query. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ tags
+ |
+
+
+
+
+ The tags. + |
+
+ metadata
+ |
+
+
+
+
+ The metadata. + |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_chain_start
+
+
+¶
+on_chain_start(
+ serialized: dict[str, Any],
+ inputs: dict[str, Any],
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ tags: list[str] | None = None,
+ metadata: dict[str, Any] | None = None,
+ **kwargs: Any,
+) -> Any
+Run when a chain starts running.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ serialized
+ |
+
+
+
+
+ The serialized chain. + |
+
+ inputs
+ |
+
+
+
+
+ The inputs. + |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ tags
+ |
+
+
+
+
+ The tags. + |
+
+ metadata
+ |
+
+
+
+
+ The metadata. + |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_tool_start
+
+
+¶
+on_tool_start(
+ serialized: dict[str, Any],
+ input_str: str,
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ tags: list[str] | None = None,
+ metadata: dict[str, Any] | None = None,
+ inputs: dict[str, Any] | None = None,
+ **kwargs: Any,
+) -> Any
+Run when the tool starts running.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ serialized
+ |
+
+
+
+
+ The serialized chain. + |
+
+ input_str
+ |
+
+
+
+ The input string. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ tags
+ |
+
+
+
+
+ The tags. + |
+
+ metadata
+ |
+
+
+
+
+ The metadata. + |
+
+ inputs
+ |
+
+
+
+
+ The inputs. + |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_retriever_error
+
+
+¶
+on_retriever_error(
+ error: BaseException,
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ **kwargs: Any,
+) -> Any
+Run when Retriever errors.
| PARAMETER | +DESCRIPTION | +
|---|---|
+ error
+ |
+
+
+
+ The error that occurred. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_retriever_end
+
+
+¶
+on_retriever_end(
+ documents: Sequence[Document],
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ **kwargs: Any,
+) -> Any
+Run when Retriever ends running.
| PARAMETER | +DESCRIPTION | +
|---|---|
+ documents
+ |
+
+
+
+
+ The documents retrieved. + |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_tool_end
+
+
+¶
+on_tool_end(
+ output: Any, *, run_id: UUID, parent_run_id: UUID | None = None, **kwargs: Any
+) -> Any
+Run when the tool ends running.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ output
+ |
+
+
+
+ The output of the tool. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_tool_error
+
+
+¶
+on_tool_error(
+ error: BaseException,
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ **kwargs: Any,
+) -> Any
+Run when tool errors.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ error
+ |
+
+
+
+ The error that occurred. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_chain_end
+
+
+¶
+on_chain_end(
+ outputs: dict[str, Any],
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ **kwargs: Any,
+) -> Any
+Run when chain ends running.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ outputs
+ |
+
+
+
+
+ The outputs of the chain. + |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_chain_error
+
+
+¶
+on_chain_error(
+ error: BaseException,
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ **kwargs: Any,
+) -> Any
+Run when chain errors.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ error
+ |
+
+
+
+ The error that occurred. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_agent_action
+
+
+¶
+on_agent_action(
+ action: AgentAction,
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ **kwargs: Any,
+) -> Any
+Run on agent action.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ action
+ |
+
+
+
+ The agent action. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_agent_finish
+
+
+¶
+on_agent_finish(
+ finish: AgentFinish,
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ **kwargs: Any,
+) -> Any
+Run on the agent end.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ finish
+ |
+
+
+
+ The agent finish. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_llm_new_token
+
+
+¶
+on_llm_new_token(
+ token: str,
+ *,
+ chunk: GenerationChunk | ChatGenerationChunk | None = None,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ tags: list[str] | None = None,
+ **kwargs: Any,
+) -> Any
+Run on new output token.
+Only available when streaming is enabled.
+For both chat models and non-chat models (legacy text completion LLMs).
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ token
+ |
+
+
+
+ The new token. +
+
+ TYPE:
+ |
+
+ chunk
+ |
+
+
+
+ The new generated chunk, containing content and other information. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ tags
+ |
+
+
+
+
+ The tags. + |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_llm_end
+
+
+¶
+on_llm_end(
+ response: LLMResult,
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ tags: list[str] | None = None,
+ **kwargs: Any,
+) -> Any
+Run when LLM ends running.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ response
+ |
+
+
+
+ The response which was generated. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ tags
+ |
+
+
+
+
+ The tags. + |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_llm_error
+
+
+¶
+on_llm_error(
+ error: BaseException,
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ tags: list[str] | None = None,
+ **kwargs: Any,
+) -> Any
+Run when LLM errors.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ error
+ |
+
+
+
+ The error that occurred. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ tags
+ |
+
+
+
+
+ The tags. + |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ AsyncCallbackHandler
+
+
+¶
+
+
+
+ Bases: BaseCallbackHandler
Base async callback handler.
+ + + + + + + + + + +| METHOD | +DESCRIPTION | +
|---|---|
on_llm_start |
+
+
+
+ Run when the model starts running. + |
+
on_chat_model_start |
+
+
+
+ Run when a chat model starts running. + |
+
on_llm_new_token |
+
+
+
+ Run on new output token. Only available when streaming is enabled. + |
+
on_llm_end |
+
+
+
+ Run when the model ends running. + |
+
on_llm_error |
+
+
+
+ Run when LLM errors. + |
+
on_chain_start |
+
+
+
+ Run when a chain starts running. + |
+
on_chain_end |
+
+
+
+ Run when a chain ends running. + |
+
on_chain_error |
+
+
+
+ Run when chain errors. + |
+
on_tool_start |
+
+
+
+ Run when the tool starts running. + |
+
on_tool_end |
+
+
+
+ Run when the tool ends running. + |
+
on_tool_error |
+
+
+
+ Run when tool errors. + |
+
on_text |
+
+
+
+ Run on an arbitrary text. + |
+
on_retry |
+
+
+
+ Run on a retry event. + |
+
on_agent_action |
+
+
+
+ Run on agent action. + |
+
on_agent_finish |
+
+
+
+ Run on the agent end. + |
+
on_retriever_start |
+
+
+
+ Run on the retriever start. + |
+
on_retriever_end |
+
+
+
+ Run on the retriever end. + |
+
on_retriever_error |
+
+
+
+ Run on retriever error. + |
+
on_custom_event |
+
+
+
+ Override to define a handler for custom events. + |
+
+ raise_error
+
+
+
+ class-attribute
+ instance-attribute
+
+
+¶
+raise_error: bool = False
+Whether to raise an error if an exception occurs.
+ + +
+ run_inline
+
+
+
+ class-attribute
+ instance-attribute
+
+
+¶
+run_inline: bool = False
+Whether to run the callback inline.
+ + +
+ ignore_llm
+
+
+
+ property
+
+
+¶
+ignore_llm: bool
+Whether to ignore LLM callbacks.
+ + +
+ ignore_retry
+
+
+
+ property
+
+
+¶
+ignore_retry: bool
+Whether to ignore retry callbacks.
+ + +
+ ignore_chain
+
+
+
+ property
+
+
+¶
+ignore_chain: bool
+Whether to ignore chain callbacks.
+ + +
+ ignore_agent
+
+
+
+ property
+
+
+¶
+ignore_agent: bool
+Whether to ignore agent callbacks.
+ + +
+ ignore_retriever
+
+
+
+ property
+
+
+¶
+ignore_retriever: bool
+Whether to ignore retriever callbacks.
+ + +
+ ignore_chat_model
+
+
+
+ property
+
+
+¶
+ignore_chat_model: bool
+Whether to ignore chat model callbacks.
+ + +
+ ignore_custom_event
+
+
+
+ property
+
+
+¶
+ignore_custom_event: bool
+Ignore custom event.
+ + +
+ on_llm_start
+
+
+
+ async
+
+
+¶
+on_llm_start(
+ serialized: dict[str, Any],
+ prompts: list[str],
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ tags: list[str] | None = None,
+ metadata: dict[str, Any] | None = None,
+ **kwargs: Any,
+) -> None
+Run when the model starts running.
+Warning
+This method is called for non-chat models (regular text completion LLMs). If
+you're implementing a handler for a chat model, you should use
+on_chat_model_start instead.
| PARAMETER | +DESCRIPTION | +
|---|---|
+ serialized
+ |
+
+
+
+
+ The serialized LLM. + |
+
+ prompts
+ |
+
+
+
+
+ The prompts. + |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ tags
+ |
+
+
+
+
+ The tags. + |
+
+ metadata
+ |
+
+
+
+
+ The metadata. + |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_chat_model_start
+
+
+
+ async
+
+
+¶
+on_chat_model_start(
+ serialized: dict[str, Any],
+ messages: list[list[BaseMessage]],
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ tags: list[str] | None = None,
+ metadata: dict[str, Any] | None = None,
+ **kwargs: Any,
+) -> Any
+Run when a chat model starts running.
+Warning
+This method is called for chat models. If you're implementing a handler for
+a non-chat model, you should use on_llm_start instead.
| PARAMETER | +DESCRIPTION | +
|---|---|
+ serialized
+ |
+
+
+
+
+ The serialized chat model. + |
+
+ messages
+ |
+
+
+
+ The messages. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ tags
+ |
+
+
+
+
+ The tags. + |
+
+ metadata
+ |
+
+
+
+
+ The metadata. + |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_llm_new_token
+
+
+
+ async
+
+
+¶
+on_llm_new_token(
+ token: str,
+ *,
+ chunk: GenerationChunk | ChatGenerationChunk | None = None,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ tags: list[str] | None = None,
+ **kwargs: Any,
+) -> None
+Run on new output token. Only available when streaming is enabled.
+For both chat models and non-chat models (legacy text completion LLMs).
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ token
+ |
+
+
+
+ The new token. +
+
+ TYPE:
+ |
+
+ chunk
+ |
+
+
+
+ The new generated chunk, containing content and other information. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ tags
+ |
+
+
+
+
+ The tags. + |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_llm_end
+
+
+
+ async
+
+
+¶
+on_llm_end(
+ response: LLMResult,
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ tags: list[str] | None = None,
+ **kwargs: Any,
+) -> None
+Run when the model ends running.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ response
+ |
+
+
+
+ The response which was generated. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ tags
+ |
+
+
+
+
+ The tags. + |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_llm_error
+
+
+
+ async
+
+
+¶
+on_llm_error(
+ error: BaseException,
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ tags: list[str] | None = None,
+ **kwargs: Any,
+) -> None
+Run when LLM errors.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ error
+ |
+
+
+
+ The error that occurred. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ tags
+ |
+
+
+
+
+ The tags. + |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_chain_start
+
+
+
+ async
+
+
+¶
+on_chain_start(
+ serialized: dict[str, Any],
+ inputs: dict[str, Any],
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ tags: list[str] | None = None,
+ metadata: dict[str, Any] | None = None,
+ **kwargs: Any,
+) -> None
+Run when a chain starts running.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ serialized
+ |
+
+
+
+
+ The serialized chain. + |
+
+ inputs
+ |
+
+
+
+
+ The inputs. + |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ tags
+ |
+
+
+
+
+ The tags. + |
+
+ metadata
+ |
+
+
+
+
+ The metadata. + |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_chain_end
+
+
+
+ async
+
+
+¶
+on_chain_end(
+ outputs: dict[str, Any],
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ tags: list[str] | None = None,
+ **kwargs: Any,
+) -> None
+Run when a chain ends running.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ outputs
+ |
+
+
+
+
+ The outputs of the chain. + |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ tags
+ |
+
+
+
+
+ The tags. + |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_chain_error
+
+
+
+ async
+
+
+¶
+on_chain_error(
+ error: BaseException,
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ tags: list[str] | None = None,
+ **kwargs: Any,
+) -> None
+Run when chain errors.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ error
+ |
+
+
+
+ The error that occurred. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ tags
+ |
+
+
+
+
+ The tags. + |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_tool_start
+
+
+
+ async
+
+
+¶
+on_tool_start(
+ serialized: dict[str, Any],
+ input_str: str,
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ tags: list[str] | None = None,
+ metadata: dict[str, Any] | None = None,
+ inputs: dict[str, Any] | None = None,
+ **kwargs: Any,
+) -> None
+Run when the tool starts running.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ serialized
+ |
+
+
+
+
+ The serialized tool. + |
+
+ input_str
+ |
+
+
+
+ The input string. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ tags
+ |
+
+
+
+
+ The tags. + |
+
+ metadata
+ |
+
+
+
+
+ The metadata. + |
+
+ inputs
+ |
+
+
+
+
+ The inputs. + |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_tool_end
+
+
+
+ async
+
+
+¶
+on_tool_end(
+ output: Any,
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ tags: list[str] | None = None,
+ **kwargs: Any,
+) -> None
+Run when the tool ends running.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ output
+ |
+
+
+
+ The output of the tool. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ tags
+ |
+
+
+
+
+ The tags. + |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_tool_error
+
+
+
+ async
+
+
+¶
+on_tool_error(
+ error: BaseException,
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ tags: list[str] | None = None,
+ **kwargs: Any,
+) -> None
+Run when tool errors.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ error
+ |
+
+
+
+ The error that occurred. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ tags
+ |
+
+
+
+
+ The tags. + |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_text
+
+
+
+ async
+
+
+¶
+on_text(
+ text: str,
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ tags: list[str] | None = None,
+ **kwargs: Any,
+) -> None
+Run on an arbitrary text.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ text
+ |
+
+
+
+ The text. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ tags
+ |
+
+
+
+
+ The tags. + |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_retry
+
+
+
+ async
+
+
+¶
+on_retry(
+ retry_state: RetryCallState,
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ **kwargs: Any,
+) -> Any
+Run on a retry event.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ retry_state
+ |
+
+
+
+ The retry state. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_agent_action
+
+
+
+ async
+
+
+¶
+on_agent_action(
+ action: AgentAction,
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ tags: list[str] | None = None,
+ **kwargs: Any,
+) -> None
+Run on agent action.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ action
+ |
+
+
+
+ The agent action. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ tags
+ |
+
+
+
+
+ The tags. + |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_agent_finish
+
+
+
+ async
+
+
+¶
+on_agent_finish(
+ finish: AgentFinish,
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ tags: list[str] | None = None,
+ **kwargs: Any,
+) -> None
+Run on the agent end.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ finish
+ |
+
+
+
+ The agent finish. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ tags
+ |
+
+
+
+
+ The tags. + |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_retriever_start
+
+
+
+ async
+
+
+¶
+on_retriever_start(
+ serialized: dict[str, Any],
+ query: str,
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ tags: list[str] | None = None,
+ metadata: dict[str, Any] | None = None,
+ **kwargs: Any,
+) -> None
+Run on the retriever start.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ serialized
+ |
+
+
+
+
+ The serialized retriever. + |
+
+ query
+ |
+
+
+
+ The query. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ tags
+ |
+
+
+
+
+ The tags. + |
+
+ metadata
+ |
+
+
+
+
+ The metadata. + |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_retriever_end
+
+
+
+ async
+
+
+¶
+on_retriever_end(
+ documents: Sequence[Document],
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ tags: list[str] | None = None,
+ **kwargs: Any,
+) -> None
+Run on the retriever end.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ documents
+ |
+
+
+
+
+ The documents retrieved. + |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ tags
+ |
+
+
+
+
+ The tags. + |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_retriever_error
+
+
+
+ async
+
+
+¶
+on_retriever_error(
+ error: BaseException,
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ tags: list[str] | None = None,
+ **kwargs: Any,
+) -> None
+Run on retriever error.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ error
+ |
+
+
+
+ The error that occurred. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ tags
+ |
+
+
+
+
+ The tags. + |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_custom_event
+
+
+
+ async
+
+
+¶
+on_custom_event(
+ name: str,
+ data: Any,
+ *,
+ run_id: UUID,
+ tags: list[str] | None = None,
+ metadata: dict[str, Any] | None = None,
+ **kwargs: Any,
+) -> None
+Override to define a handler for custom events.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ name
+ |
+
+
+
+ The name of the custom event. +
+
+ TYPE:
+ |
+
+ data
+ |
+
+
+
+ The data for the custom event. +Format will match the format specified by the user. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the run. +
+
+ TYPE:
+ |
+
+ tags
+ |
+
+
+
+
+ The tags associated with the custom event (includes inherited tags). + |
+
+ metadata
+ |
+
+
+
+
+ The metadata associated with the custom event (includes inherited +metadata). + |
+
+ BaseCallbackManager
+
+
+¶
+
+
+
+ Bases: CallbackManagerMixin
Base callback manager.
+ + + + + + + + + + +| METHOD | +DESCRIPTION | +
|---|---|
on_llm_start |
+
+
+
+ Run when LLM starts running. + |
+
on_chat_model_start |
+
+
+
+ Run when a chat model starts running. + |
+
on_retriever_start |
+
+
+
 + Run when the Retriever starts running. + |
+
on_chain_start |
+
+
+
+ Run when a chain starts running. + |
+
on_tool_start |
+
+
+
+ Run when the tool starts running. + |
+
__init__ |
+
+
+
+ Initialize callback manager. + |
+
copy |
+
+
+
+ Return a copy of the callback manager. + |
+
merge |
+
+
+
+ Merge the callback manager with another callback manager. + |
+
add_handler |
+
+
+
+ Add a handler to the callback manager. + |
+
remove_handler |
+
+
+
+ Remove a handler from the callback manager. + |
+
set_handlers |
+
+
+
+ Set handlers as the only handlers on the callback manager. + |
+
set_handler |
+
+
+
+ Set handler as the only handler on the callback manager. + |
+
add_tags |
+
+
+
+ Add tags to the callback manager. + |
+
remove_tags |
+
+
+
+ Remove tags from the callback manager. + |
+
add_metadata |
+
+
+
+ Add metadata to the callback manager. + |
+
remove_metadata |
+
+
+
+ Remove metadata from the callback manager. + |
+
+ is_async
+
+
+
+ property
+
+
+¶
+is_async: bool
+Whether the callback manager is async.
+ + +
+ on_llm_start
+
+
+¶
+on_llm_start(
+ serialized: dict[str, Any],
+ prompts: list[str],
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ tags: list[str] | None = None,
+ metadata: dict[str, Any] | None = None,
+ **kwargs: Any,
+) -> Any
+Run when LLM starts running.
+Warning
+This method is called for non-chat models (regular text completion LLMs). If
+you're implementing a handler for a chat model, you should use
+on_chat_model_start instead.
| PARAMETER | +DESCRIPTION | +
|---|---|
+ serialized
+ |
+
+
+
+
+ The serialized LLM. + |
+
+ prompts
+ |
+
+
+
+
+ The prompts. + |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ tags
+ |
+
+
+
+
+ The tags. + |
+
+ metadata
+ |
+
+
+
+
+ The metadata. + |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_chat_model_start
+
+
+¶
+on_chat_model_start(
+ serialized: dict[str, Any],
+ messages: list[list[BaseMessage]],
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ tags: list[str] | None = None,
+ metadata: dict[str, Any] | None = None,
+ **kwargs: Any,
+) -> Any
+Run when a chat model starts running.
+Warning
+This method is called for chat models. If you're implementing a handler for
+a non-chat model, you should use on_llm_start instead.
| PARAMETER | +DESCRIPTION | +
|---|---|
+ serialized
+ |
+
+
+
+
+ The serialized chat model. + |
+
+ messages
+ |
+
+
+
+ The messages. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ tags
+ |
+
+
+
+
+ The tags. + |
+
+ metadata
+ |
+
+
+
+
+ The metadata. + |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_retriever_start
+
+
+¶
+on_retriever_start(
+ serialized: dict[str, Any],
+ query: str,
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ tags: list[str] | None = None,
+ metadata: dict[str, Any] | None = None,
+ **kwargs: Any,
+) -> Any
+Run when the Retriever starts running.
| PARAMETER | +DESCRIPTION | +
|---|---|
+ serialized
+ |
+
+
+
+
+ The serialized |
+
+ query
+ |
+
+
+
+ The query. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ tags
+ |
+
+
+
+
+ The tags. + |
+
+ metadata
+ |
+
+
+
+
+ The metadata. + |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_chain_start
+
+
+¶
+on_chain_start(
+ serialized: dict[str, Any],
+ inputs: dict[str, Any],
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ tags: list[str] | None = None,
+ metadata: dict[str, Any] | None = None,
+ **kwargs: Any,
+) -> Any
+Run when a chain starts running.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ serialized
+ |
+
+
+
+
+ The serialized chain. + |
+
+ inputs
+ |
+
+
+
+
+ The inputs. + |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ tags
+ |
+
+
+
+
+ The tags. + |
+
+ metadata
+ |
+
+
+
+
+ The metadata. + |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_tool_start
+
+
+¶
+on_tool_start(
+ serialized: dict[str, Any],
+ input_str: str,
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ tags: list[str] | None = None,
+ metadata: dict[str, Any] | None = None,
+ inputs: dict[str, Any] | None = None,
+ **kwargs: Any,
+) -> Any
+Run when the tool starts running.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ serialized
+ |
+
+
+
+
+ The serialized chain. + |
+
+ input_str
+ |
+
+
+
+ The input string. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ tags
+ |
+
+
+
+
+ The tags. + |
+
+ metadata
+ |
+
+
+
+
+ The metadata. + |
+
+ inputs
+ |
+
+
+
+
+ The inputs. + |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ __init__
+
+
+¶
+__init__(
+ handlers: list[BaseCallbackHandler],
+ inheritable_handlers: list[BaseCallbackHandler] | None = None,
+ parent_run_id: UUID | None = None,
+ *,
+ tags: list[str] | None = None,
+ inheritable_tags: list[str] | None = None,
+ metadata: dict[str, Any] | None = None,
+ inheritable_metadata: dict[str, Any] | None = None,
+) -> None
+Initialize callback manager.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ handlers
+ |
+
+
+
+ The handlers. +
+
+ TYPE:
+ |
+
+ inheritable_handlers
+ |
+
+
+
+ The inheritable handlers. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The parent run ID. +
+
+ TYPE:
+ |
+
+ tags
+ |
+
+
+
+
+ The tags. + |
+
+ inheritable_tags
+ |
+
+
+
+
+ The inheritable tags. + |
+
+ metadata
+ |
+
+
+
+
+ The metadata. + |
+
+ inheritable_metadata
+ |
+
+
+
+
+ The inheritable metadata. + |
+
+ merge
+
+
+¶
+merge(other: BaseCallbackManager) -> Self
+Merge the callback manager with another callback manager.
+May be overwritten in subclasses.
+Primarily used internally within merge_configs.
| RETURNS | +DESCRIPTION | +
|---|---|
+
+ Self
+
+ |
+
+
+
+ The merged callback manager of the same type as the current object. + |
+
Example
+# Merging two callback managers
+from langchain_core.callbacks.manager import (
+ CallbackManager,
+ trace_as_chain_group,
+)
+from langchain_core.callbacks.stdout import StdOutCallbackHandler
+
+manager = CallbackManager(handlers=[StdOutCallbackHandler()], tags=["tag2"])
+with trace_as_chain_group("My Group Name", tags=["tag1"]) as group_manager:
+ merged_manager = group_manager.merge(manager)
+ print(merged_manager.handlers)
+ # [
+ # <langchain_core.callbacks.stdout.StdOutCallbackHandler object at ...>,
+ # <langchain_core.callbacks.streaming_stdout.StreamingStdOutCallbackHandler object at ...>,
+ # ]
+
+ print(merged_manager.tags)
+ # ['tag2', 'tag1']
+
+ add_handler
+
+
+¶
+add_handler(handler: BaseCallbackHandler, inherit: bool = True) -> None
+Add a handler to the callback manager.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ handler
+ |
+
+
+
+ The handler to add. +
+
+ TYPE:
+ |
+
+ inherit
+ |
+
+
+
+ Whether to inherit the handler. +
+
+ TYPE:
+ |
+
+ remove_handler
+
+
+¶
+remove_handler(handler: BaseCallbackHandler) -> None
+Remove a handler from the callback manager.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ handler
+ |
+
+
+
+ The handler to remove. +
+
+ TYPE:
+ |
+
+ set_handlers
+
+
+¶
+set_handlers(handlers: list[BaseCallbackHandler], inherit: bool = True) -> None
+Set handlers as the only handlers on the callback manager.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ handlers
+ |
+
+
+
+ The handlers to set. +
+
+ TYPE:
+ |
+
+ inherit
+ |
+
+
+
+ Whether to inherit the handlers. +
+
+ TYPE:
+ |
+
+ set_handler
+
+
+¶
+set_handler(handler: BaseCallbackHandler, inherit: bool = True) -> None
+Set handler as the only handler on the callback manager.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ handler
+ |
+
+
+
+ The handler to set. +
+
+ TYPE:
+ |
+
+ inherit
+ |
+
+
+
+ Whether to inherit the handler. +
+
+ TYPE:
+ |
+
+ add_tags
+
+
+¶
+
+
+
+
+
+ remove_tags
+
+
+¶
+
+
+
+
+
+ add_metadata
+
+
+¶
+
+
+
+
+
+ CallbackManager
+
+
+¶
+
+
+
+ Bases: BaseCallbackManager
Callback manager for LangChain.
+ + + + + + + + + + +| METHOD | +DESCRIPTION | +
|---|---|
on_llm_start |
+
+
+
+ Run when LLM starts running. + |
+
on_chat_model_start |
+
+
+
+ Run when chat model starts running. + |
+
on_chain_start |
+
+
+
+ Run when chain starts running. + |
+
on_tool_start |
+
+
+
+ Run when tool starts running. + |
+
on_retriever_start |
+
+
+
+ Run when the retriever starts running. + |
+
on_custom_event |
+
+
+
+ Dispatch an adhoc event to the handlers. + |
+
configure |
+
+
+
+ Configure the callback manager. + |
+
__init__ |
+
+
+
+ Initialize callback manager. + |
+
copy |
+
+
+
+ Return a copy of the callback manager. + |
+
merge |
+
+
+
+ Merge the callback manager with another callback manager. + |
+
add_handler |
+
+
+
+ Add a handler to the callback manager. + |
+
remove_handler |
+
+
+
+ Remove a handler from the callback manager. + |
+
set_handlers |
+
+
+
+ Set handlers as the only handlers on the callback manager. + |
+
set_handler |
+
+
+
+ Set handler as the only handler on the callback manager. + |
+
add_tags |
+
+
+
+ Add tags to the callback manager. + |
+
remove_tags |
+
+
+
+ Remove tags from the callback manager. + |
+
add_metadata |
+
+
+
+ Add metadata to the callback manager. + |
+
remove_metadata |
+
+
+
+ Remove metadata from the callback manager. + |
+
+ is_async
+
+
+
+ property
+
+
+¶
+is_async: bool
+Whether the callback manager is async.
+ + +
+ on_llm_start
+
+
+¶
+on_llm_start(
+ serialized: dict[str, Any],
+ prompts: list[str],
+ run_id: UUID | None = None,
+ **kwargs: Any,
+) -> list[CallbackManagerForLLMRun]
+Run when LLM starts running.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ serialized
+ |
+
+
+
+
+ The serialized LLM. + |
+
+ prompts
+ |
+
+
+
+
+ The list of prompts. + |
+
+ run_id
+ |
+
+
+
+ The ID of the run. +
+
+ TYPE:
+ |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
| RETURNS | +DESCRIPTION | +
|---|---|
+
+ list[CallbackManagerForLLMRun]
+
+ |
+
+
+
+ A callback manager for each prompt as an LLM run. + |
+
+ on_chat_model_start
+
+
+¶
+on_chat_model_start(
+ serialized: dict[str, Any],
+ messages: list[list[BaseMessage]],
+ run_id: UUID | None = None,
+ **kwargs: Any,
+) -> list[CallbackManagerForLLMRun]
+Run when chat model starts running.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ serialized
+ |
+
+
+
+
+ The serialized LLM. + |
+
+ messages
+ |
+
+
+
+ The list of messages. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the run. +
+
+ TYPE:
+ |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
| RETURNS | +DESCRIPTION | +
|---|---|
+
+ list[CallbackManagerForLLMRun]
+
+ |
+
+
+
+ A callback manager for each list of messages as an LLM run. + |
+
+ on_chain_start
+
+
+¶
+on_chain_start(
+ serialized: dict[str, Any] | None,
+ inputs: dict[str, Any] | Any,
+ run_id: UUID | None = None,
+ **kwargs: Any,
+) -> CallbackManagerForChainRun
+Run when chain starts running.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ serialized
+ |
+
+
+
+
+ The serialized chain. + |
+
+ inputs
+ |
+
+
+
+
+ The inputs to the chain. + |
+
+ run_id
+ |
+
+
+
+ The ID of the run. +
+
+ TYPE:
+ |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
| RETURNS | +DESCRIPTION | +
|---|---|
+
+ CallbackManagerForChainRun
+
+ |
+
+
+
+ The callback manager for the chain run. + |
+
+ on_tool_start
+
+
+¶
+on_tool_start(
+ serialized: dict[str, Any] | None,
+ input_str: str,
+ run_id: UUID | None = None,
+ parent_run_id: UUID | None = None,
+ inputs: dict[str, Any] | None = None,
+ **kwargs: Any,
+) -> CallbackManagerForToolRun
+Run when tool starts running.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ serialized
+ |
+
+
+
+
+ Serialized representation of the tool. + |
+
+ input_str
+ |
+
+
+
+ The input to the tool as a string. +Non-string inputs are cast to strings. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ ID for the run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ inputs
+ |
+
+
+
+
+ The original input to the tool if provided. +Recommended for usage instead of input_str when the original input is +needed. +If provided, the inputs are expected to be formatted as a dict. The keys +will correspond to the named-arguments in the tool. + |
+
+ **kwargs
+ |
+
+
+
+ The keyword arguments to pass to the event handler. +
+
+ TYPE:
+ |
+
| RETURNS | +DESCRIPTION | +
|---|---|
+
+ CallbackManagerForToolRun
+
+ |
+
+
+
+ The callback manager for the tool run. + |
+
+ on_retriever_start
+
+
+¶
+on_retriever_start(
+ serialized: dict[str, Any] | None,
+ query: str,
+ run_id: UUID | None = None,
+ parent_run_id: UUID | None = None,
+ **kwargs: Any,
+) -> CallbackManagerForRetrieverRun
+Run when the retriever starts running.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ serialized
+ |
+
+
+
+
+ The serialized retriever. + |
+
+ query
+ |
+
+
+
+ The query. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
| RETURNS | +DESCRIPTION | +
|---|---|
+
+ CallbackManagerForRetrieverRun
+
+ |
+
+
+
+ The callback manager for the retriever run. + |
+
+ on_custom_event
+
+
+¶
+
+
+ Dispatch an adhoc event to the handlers.
+This event should NOT be used in any internal LangChain code. The event is meant +specifically for users of the library to dispatch custom events that are +tailored to their application.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ name
+ |
+
+
+
+ The name of the adhoc event. +
+
+ TYPE:
+ |
+
+ data
+ |
+
+
+
+ The data for the adhoc event. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the run. +
+
+ TYPE:
+ |
+
| RAISES | +DESCRIPTION | +
|---|---|
+
+ ValueError
+
+ |
+
+
+
+ If additional keyword arguments are passed. + |
+
+ configure
+
+
+
+ classmethod
+
+
+¶
+configure(
+ inheritable_callbacks: Callbacks = None,
+ local_callbacks: Callbacks = None,
+ verbose: bool = False,
+ inheritable_tags: list[str] | None = None,
+ local_tags: list[str] | None = None,
+ inheritable_metadata: dict[str, Any] | None = None,
+ local_metadata: dict[str, Any] | None = None,
+) -> CallbackManager
+Configure the callback manager.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ inheritable_callbacks
+ |
+
+
+
+ The inheritable callbacks. +
+
+ TYPE:
+ |
+
+ local_callbacks
+ |
+
+
+
+ The local callbacks. +
+
+ TYPE:
+ |
+
+ verbose
+ |
+
+
+
+ Whether to enable verbose mode. +
+
+ TYPE:
+ |
+
+ inheritable_tags
+ |
+
+
+
+
+ The inheritable tags. + |
+
+ local_tags
+ |
+
+
+
+
+ The local tags. + |
+
+ inheritable_metadata
+ |
+
+
+
+
+ The inheritable metadata. + |
+
+ local_metadata
+ |
+
+
+
+
+ The local metadata. + |
+
| RETURNS | +DESCRIPTION | +
|---|---|
+
+ CallbackManager
+
+ |
+
+
+
+ The configured callback manager. + |
+
+ __init__
+
+
+¶
+__init__(
+ handlers: list[BaseCallbackHandler],
+ inheritable_handlers: list[BaseCallbackHandler] | None = None,
+ parent_run_id: UUID | None = None,
+ *,
+ tags: list[str] | None = None,
+ inheritable_tags: list[str] | None = None,
+ metadata: dict[str, Any] | None = None,
+ inheritable_metadata: dict[str, Any] | None = None,
+) -> None
+Initialize callback manager.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ handlers
+ |
+
+
+
+ The handlers. +
+
+ TYPE:
+ |
+
+ inheritable_handlers
+ |
+
+
+
+ The inheritable handlers. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The parent run ID. +
+
+ TYPE:
+ |
+
+ tags
+ |
+
+
+
+
+ The tags. + |
+
+ inheritable_tags
+ |
+
+
+
+
+ The inheritable tags. + |
+
+ metadata
+ |
+
+
+
+
+ The metadata. + |
+
+ inheritable_metadata
+ |
+
+
+
+
+ The inheritable metadata. + |
+
+ merge
+
+
+¶
+merge(other: BaseCallbackManager) -> Self
+Merge the callback manager with another callback manager.
+May be overwritten in subclasses.
+Primarily used internally within merge_configs.
| RETURNS | +DESCRIPTION | +
|---|---|
+
+ Self
+
+ |
+
+
+
+ The merged callback manager of the same type as the current object. + |
+
Example
+# Merging two callback managers
+from langchain_core.callbacks.manager import (
+ CallbackManager,
+ trace_as_chain_group,
+)
+from langchain_core.callbacks.stdout import StdOutCallbackHandler
+
+manager = CallbackManager(handlers=[StdOutCallbackHandler()], tags=["tag2"])
+with trace_as_chain_group("My Group Name", tags=["tag1"]) as group_manager:
+ merged_manager = group_manager.merge(manager)
+ print(merged_manager.handlers)
+ # [
+ # <langchain_core.callbacks.stdout.StdOutCallbackHandler object at ...>,
+ # <langchain_core.callbacks.streaming_stdout.StreamingStdOutCallbackHandler object at ...>,
+ # ]
+
+ print(merged_manager.tags)
+ # ['tag2', 'tag1']
+
+ add_handler
+
+
+¶
+add_handler(handler: BaseCallbackHandler, inherit: bool = True) -> None
+Add a handler to the callback manager.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ handler
+ |
+
+
+
+ The handler to add. +
+
+ TYPE:
+ |
+
+ inherit
+ |
+
+
+
+ Whether to inherit the handler. +
+
+ TYPE:
+ |
+
+ remove_handler
+
+
+¶
+remove_handler(handler: BaseCallbackHandler) -> None
+Remove a handler from the callback manager.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ handler
+ |
+
+
+
+ The handler to remove. +
+
+ TYPE:
+ |
+
+ set_handlers
+
+
+¶
+set_handlers(handlers: list[BaseCallbackHandler], inherit: bool = True) -> None
+Set handlers as the only handlers on the callback manager.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ handlers
+ |
+
+
+
+ The handlers to set. +
+
+ TYPE:
+ |
+
+ inherit
+ |
+
+
+
+ Whether to inherit the handlers. +
+
+ TYPE:
+ |
+
+ set_handler
+
+
+¶
+set_handler(handler: BaseCallbackHandler, inherit: bool = True) -> None
+Set handler as the only handler on the callback manager.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ handler
+ |
+
+
+
+ The handler to set. +
+
+ TYPE:
+ |
+
+ inherit
+ |
+
+
+
+ Whether to inherit the handler. +
+
+ TYPE:
+ |
+
+ add_tags
+
+
+¶
+
+
+
+
+
+ remove_tags
+
+
+¶
+
+
+
+
+
+ add_metadata
+
+
+¶
+
+
+
+
+
+ AsyncCallbackManager
+
+
+¶
+
+
+
+ Bases: BaseCallbackManager
Async callback manager that handles callbacks from LangChain.
+ + + + + + + + + + +| METHOD | +DESCRIPTION | +
|---|---|
on_llm_start |
+
+
+
+ Run when LLM starts running. + |
+
on_chat_model_start |
+
+
+
+ Async run when chat model starts running. + |
+
on_chain_start |
+
+
+
+ Async run when chain starts running. + |
+
on_tool_start |
+
+
+
+ Run when the tool starts running. + |
+
on_custom_event |
+
+
+
+ Dispatch an adhoc event to the handlers (async version). + |
+
on_retriever_start |
+
+
+
+ Run when the retriever starts running. + |
+
configure |
+
+
+
+ Configure the async callback manager. + |
+
__init__ |
+
+
+
+ Initialize callback manager. + |
+
copy |
+
+
+
+ Return a copy of the callback manager. + |
+
merge |
+
+
+
+ Merge the callback manager with another callback manager. + |
+
add_handler |
+
+
+
+ Add a handler to the callback manager. + |
+
remove_handler |
+
+
+
+ Remove a handler from the callback manager. + |
+
set_handlers |
+
+
+
+ Set handlers as the only handlers on the callback manager. + |
+
set_handler |
+
+
+
+ Set handler as the only handler on the callback manager. + |
+
add_tags |
+
+
+
+ Add tags to the callback manager. + |
+
remove_tags |
+
+
+
+ Remove tags from the callback manager. + |
+
add_metadata |
+
+
+
+ Add metadata to the callback manager. + |
+
remove_metadata |
+
+
+
+ Remove metadata from the callback manager. + |
+
+ is_async
+
+
+
+ property
+
+
+¶
+is_async: bool
+Return whether the handler is async.
+ + +
+ on_llm_start
+
+
+
+ async
+
+
+¶
+on_llm_start(
+ serialized: dict[str, Any],
+ prompts: list[str],
+ run_id: UUID | None = None,
+ **kwargs: Any,
+) -> list[AsyncCallbackManagerForLLMRun]
+Run when LLM starts running.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ serialized
+ |
+
+
+
+
+ The serialized LLM. + |
+
+ prompts
+ |
+
+
+
+
+ The list of prompts. + |
+
+ run_id
+ |
+
+
+
+ The ID of the run. +
+
+ TYPE:
+ |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
| RETURNS | +DESCRIPTION | +
|---|---|
+
+ list[AsyncCallbackManagerForLLMRun]
+
+ |
+
+
+
+ The list of async callback managers, one for each LLM run corresponding to + |
+
+
+ list[AsyncCallbackManagerForLLMRun]
+
+ |
+
+
+
+ each prompt. + |
+
+ on_chat_model_start
+
+
+
+ async
+
+
+¶
+on_chat_model_start(
+ serialized: dict[str, Any],
+ messages: list[list[BaseMessage]],
+ run_id: UUID | None = None,
+ **kwargs: Any,
+) -> list[AsyncCallbackManagerForLLMRun]
+Async run when chat model starts running.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ serialized
+ |
+
+
+
+
+ The serialized LLM. + |
+
+ messages
+ |
+
+
+
+ The list of messages. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the run. +
+
+ TYPE:
+ |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
| RETURNS | +DESCRIPTION | +
|---|---|
+
+ list[AsyncCallbackManagerForLLMRun]
+
+ |
+
+
+
+ The list of async callback managers, one for each LLM run corresponding to + |
+
+
+ list[AsyncCallbackManagerForLLMRun]
+
+ |
+
+
+
+ each inner message list. + |
+
+ on_chain_start
+
+
+
+ async
+
+
+¶
+on_chain_start(
+ serialized: dict[str, Any] | None,
+ inputs: dict[str, Any] | Any,
+ run_id: UUID | None = None,
+ **kwargs: Any,
+) -> AsyncCallbackManagerForChainRun
+Async run when chain starts running.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ serialized
+ |
+
+
+
+
+ The serialized chain. + |
+
+ inputs
+ |
+
+
+
+
+ The inputs to the chain. + |
+
+ run_id
+ |
+
+
+
+ The ID of the run. +
+
+ TYPE:
+ |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
| RETURNS | +DESCRIPTION | +
|---|---|
+
+ AsyncCallbackManagerForChainRun
+
+ |
+
+
+
+ The async callback manager for the chain run. + |
+
+ on_tool_start
+
+
+
+ async
+
+
+¶
+on_tool_start(
+ serialized: dict[str, Any] | None,
+ input_str: str,
+ run_id: UUID | None = None,
+ parent_run_id: UUID | None = None,
+ **kwargs: Any,
+) -> AsyncCallbackManagerForToolRun
+Run when the tool starts running.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ serialized
+ |
+
+
+
+
+ The serialized tool. + |
+
+ input_str
+ |
+
+
+
+ The input to the tool. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
| RETURNS | +DESCRIPTION | +
|---|---|
+
+ AsyncCallbackManagerForToolRun
+
+ |
+
+
+
+ The async callback manager for the tool run. + |
+
+ on_custom_event
+
+
+
+ async
+
+
+¶
+
+
+ Dispatch an adhoc event to the handlers (async version).
+This event should NOT be used in any internal LangChain code. The event is meant +specifically for users of the library to dispatch custom events that are +tailored to their application.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ name
+ |
+
+
+
+ The name of the adhoc event. +
+
+ TYPE:
+ |
+
+ data
+ |
+
+
+
+ The data for the adhoc event. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the run. +
+
+ TYPE:
+ |
+
| RAISES | +DESCRIPTION | +
|---|---|
+
+ ValueError
+
+ |
+
+
+
+ If additional keyword arguments are passed. + |
+
+ on_retriever_start
+
+
+
+ async
+
+
+¶
+on_retriever_start(
+ serialized: dict[str, Any] | None,
+ query: str,
+ run_id: UUID | None = None,
+ parent_run_id: UUID | None = None,
+ **kwargs: Any,
+) -> AsyncCallbackManagerForRetrieverRun
+Run when the retriever starts running.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ serialized
+ |
+
+
+
+
+ The serialized retriever. + |
+
+ query
+ |
+
+
+
+ The query. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
| RETURNS | +DESCRIPTION | +
|---|---|
+
+ AsyncCallbackManagerForRetrieverRun
+
+ |
+
+
+
+ The async callback manager for the retriever run. + |
+
+ configure
+
+
+
+ classmethod
+
+
+¶
+configure(
+ inheritable_callbacks: Callbacks = None,
+ local_callbacks: Callbacks = None,
+ verbose: bool = False,
+ inheritable_tags: list[str] | None = None,
+ local_tags: list[str] | None = None,
+ inheritable_metadata: dict[str, Any] | None = None,
+ local_metadata: dict[str, Any] | None = None,
+) -> AsyncCallbackManager
+Configure the async callback manager.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ inheritable_callbacks
+ |
+
+
+
+ The inheritable callbacks. +
+
+ TYPE:
+ |
+
+ local_callbacks
+ |
+
+
+
+ The local callbacks. +
+
+ TYPE:
+ |
+
+ verbose
+ |
+
+
+
+ Whether to enable verbose mode. +
+
+ TYPE:
+ |
+
+ inheritable_tags
+ |
+
+
+
+
+ The inheritable tags. + |
+
+ local_tags
+ |
+
+
+
+
+ The local tags. + |
+
+ inheritable_metadata
+ |
+
+
+
+
+ The inheritable metadata. + |
+
+ local_metadata
+ |
+
+
+
+
+ The local metadata. + |
+
| RETURNS | +DESCRIPTION | +
|---|---|
+
+ AsyncCallbackManager
+
+ |
+
+
+
+ The configured async callback manager. + |
+
+ __init__
+
+
+¶
+__init__(
+ handlers: list[BaseCallbackHandler],
+ inheritable_handlers: list[BaseCallbackHandler] | None = None,
+ parent_run_id: UUID | None = None,
+ *,
+ tags: list[str] | None = None,
+ inheritable_tags: list[str] | None = None,
+ metadata: dict[str, Any] | None = None,
+ inheritable_metadata: dict[str, Any] | None = None,
+) -> None
+Initialize callback manager.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ handlers
+ |
+
+
+
+ The handlers. +
+
+ TYPE:
+ |
+
+ inheritable_handlers
+ |
+
+
+
+ The inheritable handlers. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The parent run ID. +
+
+ TYPE:
+ |
+
+ tags
+ |
+
+
+
+
+ The tags. + |
+
+ inheritable_tags
+ |
+
+
+
+
+ The inheritable tags. + |
+
+ metadata
+ |
+
+
+
+
+ The metadata. + |
+
+ inheritable_metadata
+ |
+
+
+
+
+ The inheritable metadata. + |
+
+ merge
+
+
+¶
+merge(other: BaseCallbackManager) -> Self
+Merge the callback manager with another callback manager.
+May be overwritten in subclasses.
+Primarily used internally within merge_configs.
| RETURNS | +DESCRIPTION | +
|---|---|
+
+ Self
+
+ |
+
+
+
+ The merged callback manager of the same type as the current object. + |
+
Example
+# Merging two callback managers
+from langchain_core.callbacks.manager import (
+ CallbackManager,
+ trace_as_chain_group,
+)
+from langchain_core.callbacks.stdout import StdOutCallbackHandler
+
+manager = CallbackManager(handlers=[StdOutCallbackHandler()], tags=["tag2"])
+with trace_as_chain_group("My Group Name", tags=["tag1"]) as group_manager:
+ merged_manager = group_manager.merge(manager)
+ print(merged_manager.handlers)
+ # [
+ # <langchain_core.callbacks.stdout.StdOutCallbackHandler object at ...>,
+ # <langchain_core.callbacks.streaming_stdout.StreamingStdOutCallbackHandler object at ...>,
+ # ]
+
+ print(merged_manager.tags)
+ # ['tag2', 'tag1']
+
+ add_handler
+
+
+¶
+add_handler(handler: BaseCallbackHandler, inherit: bool = True) -> None
+Add a handler to the callback manager.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ handler
+ |
+
+
+
+ The handler to add. +
+
+ TYPE:
+ |
+
+ inherit
+ |
+
+
+
+ Whether to inherit the handler. +
+
+ TYPE:
+ |
+
+ remove_handler
+
+
+¶
+remove_handler(handler: BaseCallbackHandler) -> None
+Remove a handler from the callback manager.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ handler
+ |
+
+
+
+ The handler to remove. +
+
+ TYPE:
+ |
+
+ set_handlers
+
+
+¶
+set_handlers(handlers: list[BaseCallbackHandler], inherit: bool = True) -> None
+Set handlers as the only handlers on the callback manager.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ handlers
+ |
+
+
+
+ The handlers to set. +
+
+ TYPE:
+ |
+
+ inherit
+ |
+
+
+
+ Whether to inherit the handlers. +
+
+ TYPE:
+ |
+
+ set_handler
+
+
+¶
+set_handler(handler: BaseCallbackHandler, inherit: bool = True) -> None
+Set handler as the only handler on the callback manager.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ handler
+ |
+
+
+
+ The handler to set. +
+
+ TYPE:
+ |
+
+ inherit
+ |
+
+
+
+ Whether to inherit the handler. +
+
+ TYPE:
+ |
+
+ add_tags
+
+
+¶
+
+
+
+
+
+ remove_tags
+
+
+¶
+
+
+
+
+
+ add_metadata
+
+
+¶
+
+
+
+
+
+ UsageMetadataCallbackHandler
+
+
+¶
+
+
+
+ Bases: BaseCallbackHandler
Callback Handler that tracks AIMessage.usage_metadata.
Example
+from langchain.chat_models import init_chat_model
+from langchain_core.callbacks import UsageMetadataCallbackHandler
+
+llm_1 = init_chat_model(model="openai:gpt-4o-mini")
+llm_2 = init_chat_model(model="anthropic:claude-3-5-haiku-20241022")
+
+callback = UsageMetadataCallbackHandler()
+result_1 = llm_1.invoke("Hello", config={"callbacks": [callback]})
+result_2 = llm_2.invoke("Hello", config={"callbacks": [callback]})
+callback.usage_metadata
+{'gpt-4o-mini-2024-07-18': {'input_tokens': 8,
+ 'output_tokens': 10,
+ 'total_tokens': 18,
+ 'input_token_details': {'audio': 0, 'cache_read': 0},
+ 'output_token_details': {'audio': 0, 'reasoning': 0}},
+ 'claude-3-5-haiku-20241022': {'input_tokens': 8,
+ 'output_tokens': 21,
+ 'total_tokens': 29,
+ 'input_token_details': {'cache_read': 0, 'cache_creation': 0}}}
+Added in langchain-core 0.3.49
| METHOD | +DESCRIPTION | +
|---|---|
__init__ |
+
+
+
+ Initialize the |
+
on_llm_end |
+
+
+
+ Collect token usage. + |
+
on_text |
+
+
+
+ Run on an arbitrary text. + |
+
on_retry |
+
+
+
+ Run on a retry event. + |
+
on_custom_event |
+
+
+
+ Override to define a handler for a custom event. + |
+
on_llm_start |
+
+
+
+ Run when LLM starts running. + |
+
on_chat_model_start |
+
+
+
+ Run when a chat model starts running. + |
+
on_retriever_start |
+
+
+
+ Run when the |
+
on_chain_start |
+
+
+
+ Run when a chain starts running. + |
+
on_tool_start |
+
+
+
+ Run when the tool starts running. + |
+
on_retriever_error |
+
+
+
+ Run when |
+
on_retriever_end |
+
+
+
+ Run when |
+
on_tool_end |
+
+
+
+ Run when the tool ends running. + |
+
on_tool_error |
+
+
+
+ Run when tool errors. + |
+
on_chain_end |
+
+
+
+ Run when chain ends running. + |
+
on_chain_error |
+
+
+
+ Run when chain errors. + |
+
on_agent_action |
+
+
+
+ Run on agent action. + |
+
on_agent_finish |
+
+
+
+ Run on the agent end. + |
+
on_llm_new_token |
+
+
+
+ Run on new output token. + |
+
on_llm_error |
+
+
+
+ Run when LLM errors. + |
+
+ raise_error
+
+
+
+ class-attribute
+ instance-attribute
+
+
+¶
+raise_error: bool = False
+Whether to raise an error if an exception occurs.
+ + +
+ run_inline
+
+
+
+ class-attribute
+ instance-attribute
+
+
+¶
+run_inline: bool = False
+Whether to run the callback inline.
+ + +
+ ignore_llm
+
+
+
+ property
+
+
+¶
+ignore_llm: bool
+Whether to ignore LLM callbacks.
+ + +
+ ignore_retry
+
+
+
+ property
+
+
+¶
+ignore_retry: bool
+Whether to ignore retry callbacks.
+ + +
+ ignore_chain
+
+
+
+ property
+
+
+¶
+ignore_chain: bool
+Whether to ignore chain callbacks.
+ + +
+ ignore_agent
+
+
+
+ property
+
+
+¶
+ignore_agent: bool
+Whether to ignore agent callbacks.
+ + +
+ ignore_retriever
+
+
+
+ property
+
+
+¶
+ignore_retriever: bool
+Whether to ignore retriever callbacks.
+ + +
+ ignore_chat_model
+
+
+
+ property
+
+
+¶
+ignore_chat_model: bool
+Whether to ignore chat model callbacks.
+ + +
+ ignore_custom_event
+
+
+
+ property
+
+
+¶
+ignore_custom_event: bool
+Ignore custom event.
+ + +
+ __init__
+
+
+¶
+
+
+ Initialize the UsageMetadataCallbackHandler.
+ on_llm_end
+
+
+¶
+on_llm_end(response: LLMResult, **kwargs: Any) -> None
+Collect token usage.
+ +
+ on_text
+
+
+¶
+
+
+ Run on an arbitrary text.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ text
+ |
+
+
+
+ The text. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_retry
+
+
+¶
+on_retry(
+ retry_state: RetryCallState,
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ **kwargs: Any,
+) -> Any
+Run on a retry event.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ retry_state
+ |
+
+
+
+ The retry state. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_custom_event
+
+
+¶
+on_custom_event(
+ name: str,
+ data: Any,
+ *,
+ run_id: UUID,
+ tags: list[str] | None = None,
+ metadata: dict[str, Any] | None = None,
+ **kwargs: Any,
+) -> Any
+Override to define a handler for a custom event.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ name
+ |
+
+
+
+ The name of the custom event. +
+
+ TYPE:
+ |
+
+ data
+ |
+
+
+
+ The data for the custom event. +Format will match the format specified by the user. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the run. +
+
+ TYPE:
+ |
+
+ tags
+ |
+
+
+
+
+ The tags associated with the custom event (includes inherited tags). + |
+
+ metadata
+ |
+
+
+
+
+ The metadata associated with the custom event (includes inherited +metadata). + |
+
+ on_llm_start
+
+
+¶
+on_llm_start(
+ serialized: dict[str, Any],
+ prompts: list[str],
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ tags: list[str] | None = None,
+ metadata: dict[str, Any] | None = None,
+ **kwargs: Any,
+) -> Any
+Run when LLM starts running.
+Warning
+This method is called for non-chat models (regular text completion LLMs). If
+you're implementing a handler for a chat model, you should use
+on_chat_model_start instead.
| PARAMETER | +DESCRIPTION | +
|---|---|
+ serialized
+ |
+
+
+
+
+ The serialized LLM. + |
+
+ prompts
+ |
+
+
+
+
+ The prompts. + |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ tags
+ |
+
+
+
+
+ The tags. + |
+
+ metadata
+ |
+
+
+
+
+ The metadata. + |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_chat_model_start
+
+
+¶
+on_chat_model_start(
+ serialized: dict[str, Any],
+ messages: list[list[BaseMessage]],
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ tags: list[str] | None = None,
+ metadata: dict[str, Any] | None = None,
+ **kwargs: Any,
+) -> Any
+Run when a chat model starts running.
+Warning
+This method is called for chat models. If you're implementing a handler for
+a non-chat model, you should use on_llm_start instead.
| PARAMETER | +DESCRIPTION | +
|---|---|
+ serialized
+ |
+
+
+
+
+ The serialized chat model. + |
+
+ messages
+ |
+
+
+
+ The messages. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ tags
+ |
+
+
+
+
+ The tags. + |
+
+ metadata
+ |
+
+
+
+
+ The metadata. + |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_retriever_start
+
+
+¶
+on_retriever_start(
+ serialized: dict[str, Any],
+ query: str,
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ tags: list[str] | None = None,
+ metadata: dict[str, Any] | None = None,
+ **kwargs: Any,
+) -> Any
+Run when the Retriever starts running.
| PARAMETER | +DESCRIPTION | +
|---|---|
+ serialized
+ |
+
+
+
+
+ The serialized |
+
+ query
+ |
+
+
+
+ The query. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ tags
+ |
+
+
+
+
+ The tags. + |
+
+ metadata
+ |
+
+
+
+
+ The metadata. + |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_chain_start
+
+
+¶
+on_chain_start(
+ serialized: dict[str, Any],
+ inputs: dict[str, Any],
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ tags: list[str] | None = None,
+ metadata: dict[str, Any] | None = None,
+ **kwargs: Any,
+) -> Any
+Run when a chain starts running.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ serialized
+ |
+
+
+
+
+ The serialized chain. + |
+
+ inputs
+ |
+
+
+
+
+ The inputs. + |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ tags
+ |
+
+
+
+
+ The tags. + |
+
+ metadata
+ |
+
+
+
+
+ The metadata. + |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_tool_start
+
+
+¶
+on_tool_start(
+ serialized: dict[str, Any],
+ input_str: str,
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ tags: list[str] | None = None,
+ metadata: dict[str, Any] | None = None,
+ inputs: dict[str, Any] | None = None,
+ **kwargs: Any,
+) -> Any
+Run when the tool starts running.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ serialized
+ |
+
+
+
+
+ The serialized chain. + |
+
+ input_str
+ |
+
+
+
+ The input string. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ tags
+ |
+
+
+
+
+ The tags. + |
+
+ metadata
+ |
+
+
+
+
+ The metadata. + |
+
+ inputs
+ |
+
+
+
+
+ The inputs. + |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_retriever_error
+
+
+¶
+on_retriever_error(
+ error: BaseException,
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ **kwargs: Any,
+) -> Any
+Run when Retriever errors.
| PARAMETER | +DESCRIPTION | +
|---|---|
+ error
+ |
+
+
+
+ The error that occurred. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_retriever_end
+
+
+¶
+on_retriever_end(
+ documents: Sequence[Document],
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ **kwargs: Any,
+) -> Any
+Run when Retriever ends running.
| PARAMETER | +DESCRIPTION | +
|---|---|
+ documents
+ |
+
+
+
+
+ The documents retrieved. + |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_tool_end
+
+
+¶
+on_tool_end(
+ output: Any, *, run_id: UUID, parent_run_id: UUID | None = None, **kwargs: Any
+) -> Any
+Run when the tool ends running.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ output
+ |
+
+
+
+ The output of the tool. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_tool_error
+
+
+¶
+on_tool_error(
+ error: BaseException,
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ **kwargs: Any,
+) -> Any
+Run when tool errors.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ error
+ |
+
+
+
+ The error that occurred. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_chain_end
+
+
+¶
+on_chain_end(
+ outputs: dict[str, Any],
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ **kwargs: Any,
+) -> Any
+Run when chain ends running.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ outputs
+ |
+
+
+
+
+ The outputs of the chain. + |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_chain_error
+
+
+¶
+on_chain_error(
+ error: BaseException,
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ **kwargs: Any,
+) -> Any
+Run when chain errors.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ error
+ |
+
+
+
+ The error that occurred. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_agent_action
+
+
+¶
+on_agent_action(
+ action: AgentAction,
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ **kwargs: Any,
+) -> Any
+Run on agent action.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ action
+ |
+
+
+
+ The agent action. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_agent_finish
+
+
+¶
+on_agent_finish(
+ finish: AgentFinish,
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ **kwargs: Any,
+) -> Any
+Run on the agent end.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ finish
+ |
+
+
+
+ The agent finish. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_llm_new_token
+
+
+¶
+on_llm_new_token(
+ token: str,
+ *,
+ chunk: GenerationChunk | ChatGenerationChunk | None = None,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ tags: list[str] | None = None,
+ **kwargs: Any,
+) -> Any
+Run on new output token.
+Only available when streaming is enabled.
+For both chat models and non-chat models (legacy text completion LLMs).
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ token
+ |
+
+
+
+ The new token. +
+
+ TYPE:
+ |
+
+ chunk
+ |
+
+
+
+ The new generated chunk, containing content and other information. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ tags
+ |
+
+
+
+
+ The tags. + |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ on_llm_error
+
+
+¶
+on_llm_error(
+ error: BaseException,
+ *,
+ run_id: UUID,
+ parent_run_id: UUID | None = None,
+ tags: list[str] | None = None,
+ **kwargs: Any,
+) -> Any
+Run when LLM errors.
+ + +| PARAMETER | +DESCRIPTION | +
|---|---|
+ error
+ |
+
+
+
+ The error that occurred. +
+
+ TYPE:
+ |
+
+ run_id
+ |
+
+
+
+ The ID of the current run. +
+
+ TYPE:
+ |
+
+ parent_run_id
+ |
+
+
+
+ The ID of the parent run. +
+
+ TYPE:
+ |
+
+ tags
+ |
+
+
+
+
+ The tags. + |
+
+ **kwargs
+ |
+
+
+
+ Additional keyword arguments. +
+
+ TYPE:
+ |
+
+ get_usage_metadata_callback
+
+
+¶
+get_usage_metadata_callback(
+ name: str = "usage_metadata_callback",
+) -> Generator[UsageMetadataCallbackHandler, None, None]
+Get usage metadata callback.
+Get context manager for tracking usage metadata across chat model calls using
+AIMessage.usage_metadata.
| PARAMETER | +DESCRIPTION | +
|---|---|
+ name
+ |
+
+
+
+ The name of the context variable. +
+
+ TYPE:
+ |
+
| YIELDS | +DESCRIPTION | +
|---|---|
+
+ UsageMetadataCallbackHandler
+
+ |
+
+
+
+ The usage metadata callback. + |
+
Example
+from langchain.chat_models import init_chat_model
+from langchain_core.callbacks import get_usage_metadata_callback
+
+llm_1 = init_chat_model(model="openai:gpt-4o-mini")
+llm_2 = init_chat_model(model="anthropic:claude-3-5-haiku-20241022")
+
+with get_usage_metadata_callback() as cb:
+ llm_1.invoke("Hello")
+ llm_2.invoke("Hello")
+ print(cb.usage_metadata)
+{
+ "gpt-4o-mini-2024-07-18": {
+ "input_tokens": 8,
+ "output_tokens": 10,
+ "total_tokens": 18,
+ "input_token_details": {"audio": 0, "cache_read": 0},
+ "output_token_details": {"audio": 0, "reasoning": 0},
+ },
+ "claude-3-5-haiku-20241022": {
+ "input_tokens": 8,
+ "output_tokens": 21,
+ "total_tokens": 29,
+ "input_token_details": {"cache_read": 0, "cache_creation": 0},
+ },
+}
+Added in langchain-core 0.3.49
+
+Prompting and orchestration logic can be used to summarize the message history. For example, in LangGraph you can extend the [`MessagesState`](/oss/python/langgraph/graph-api#working-with-messages-in-graph-state) to include a `summary` key:
+
+```python theme={null}
+from langgraph.graph import MessagesState
+class State(MessagesState):
+ summary: str
+```
+
+Then, you can generate a summary of the chat history, using any existing summary as context for the next summary. This `summarize_conversation` node can be called after some number of messages have accumulated in the `messages` state key.
+
+```python theme={null}
+def summarize_conversation(state: State):
+
+ # First, we get any existing summary
+ summary = state.get("summary", "")
+
+ # Create our summarization prompt
+ if summary:
+
+ # A summary already exists
+ summary_message = (
+ f"This is a summary of the conversation to date: {summary}\n\n"
+ "Extend the summary by taking into account the new messages above:"
+ )
+
+ else:
+ summary_message = "Create a summary of the conversation above:"
+
+ # Add prompt to our history
+ messages = state["messages"] + [HumanMessage(content=summary_message)]
+ response = model.invoke(messages)
+
+ # Delete all but the 2 most recent messages
+ delete_messages = [RemoveMessage(id=m.id) for m in state["messages"][:-2]]
+ return {"summary": response.content, "messages": delete_messages}
+```
+
+