When I run:
from langchain.prompts import ChatPromptTemplate
from langchain_core.output_parsers import StrOutputParser
from langchain_openai import ChatOpenAI
model = ChatOpenAI()
prompt = ChatPromptTemplate.from_template("tell me a joke about {topic}")
chain = prompt | model | StrOutputParser
chain.invoke({"topic": "bears"})
I get:
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-48-da226a1d50ac> in <cell line: 8>()
6 prompt = ChatPromptTemplate.from_template("tell me a joke about {topic}")
7 chain = prompt | model | StrOutputParser
----> 8 chain.invoke({"topic": "bears"})
/usr/local/lib/python3.10/dist-packages/langchain_core/runnables/base.py in invoke(self, input, config)
1512 try:
1513 for i, step in enumerate(self.steps):
-> 1514 input = step.invoke(
1515 input,
1516 # mark each step as a child run
/usr/local/lib/python3.10/dist-packages/langchain_core/runnables/base.py in invoke(self, input, config, **kwargs)
2629 """Invoke this runnable synchronously."""
2630 if hasattr(self, "func"):
-> 2631 return self._call_with_config(
2632 self._invoke,
2633 input,
/usr/local/lib/python3.10/dist-packages/langchain_core/runnables/base.py in _call_with_config(self, func, input, config, run_type, **kwargs)
884 )
885 try:
--> 886 output = call_func_with_variable_args(
887 func, input, config, run_manager, **kwargs
888 )
/usr/local/lib/python3.10/dist-packages/langchain_core/runnables/config.py in call_func_with_variable_args(func, input, config, run_manager, **kwargs)
306 if run_manager is not None and accepts_run_manager(func):
307 kwargs["run_manager"] = run_manager
--> 308 return func(input, **kwargs) # type: ignore[call-arg]
309
310
/usr/local/lib/python3.10/dist-packages/langchain_core/runnables/base.py in _invoke(self, input, run_manager, config, **kwargs)
2547 **kwargs: Any,
2548 ) -> Output:
-> 2549 output = call_func_with_variable_args(
2550 self.func, input, config, run_manager, **kwargs
2551 )
/usr/local/lib/python3.10/dist-packages/langchain_core/runnables/config.py in call_func_with_variable_args(func, input, config, run_manager, **kwargs)
306 if run_manager is not None and accepts_run_manager(func):
307 kwargs["run_manager"] = run_manager
--> 308 return func(input, **kwargs) # type: ignore[call-arg]
309
310
TypeError: Serializable.__init__() takes 1 positional argument but 2 were given
I get the same error even with this very simple example:
prompt2 = ChatPromptTemplate.from_template("What's up?")
chain = prompt2 | model | StrOutputParser
chain.invoke({})
I've followed the LangChain docs on Prompt + LLM, and as far as I can tell my code matches the official example exactly.
You're piping in the class itself rather than an instance. When LCEL encounters a bare callable like StrOutputParser in a chain, it wraps it in a RunnableLambda, so invoking the chain ends up calling StrOutputParser(message) with the model output as a positional argument to the constructor. That is exactly what the error says: Serializable.__init__() takes 1 positional argument but 2 were given. Instantiate the parser instead: use StrOutputParser() (with parentheses) rather than StrOutputParser.
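Here is a minimal standalone sketch (assuming only that langchain_core is installed) that reproduces the error outside the chain and shows the difference between the class and an instance:
from langchain_core.output_parsers import StrOutputParser

# Calling the class with a positional argument is what the chain effectively
# did: the pipe wrapped the bare class as a function and fed it the model output.
try:
    StrOutputParser("model output")
except TypeError as e:
    print(e)  # Serializable.__init__() takes 1 positional argument but 2 were given

# An instance, by contrast, is a Runnable with an invoke() method that
# passes string input through unchanged.
parser = StrOutputParser()
print(parser.invoke("model output"))  # -> model output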
Try this:
from langchain.prompts import ChatPromptTemplate
from langchain_core.output_parsers import StrOutputParser
from langchain_openai import ChatOpenAI
model = ChatOpenAI()
prompt = ChatPromptTemplate.from_template("tell me a joke about {topic}")
chain = prompt | model | StrOutputParser()
chain.invoke({"topic": "bears"})
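With the parser instantiated, the chain's output is a plain string rather than an AIMessage. A quick sanity check (assuming OPENAI_API_KEY is set in your environment):
result = chain.invoke({"topic": "bears"})
print(type(result))  # <class 'str'>, because StrOutputParser() extracted the message content
print(result)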