AttributeError: 'str'-Objekt hat in langchain_core kein Attribut 'model_dump'
Posted: 07 Feb 2025, 03:31
Bei mir ist ein Fehler aufgetreten, während ich mit den Bibliotheken langchain-core und langchain-openai gearbeitet habe, und ich hoffe, dass mir jemand bei der Lösung dieses Problems helfen kann.
Fehlermeldung:
Code: Select all
AttributeError: 'str' object has no attribute 'model_dump'
Beispielcode:
Code: Select all
import pandas as pd
from data_api import *
from langchain_openai import ChatOpenAI
# from langchain.chat_models import ChatOpenAI
from dotenv import load_dotenv, find_dotenv
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.output_parsers import StrOutputParser
from perplexity.perplexity import search_chat_completion
from langchain.prompts import PromptTemplate
from langchain_core.runnables import RunnableLambda
from operator import itemgetter
import json
import os
from typing import List, Dict, Any
from tqdm import tqdm
# Model clients used by the pipeline.  NOTE(review): this is a fragment
# from inside a class method (presumably __init__); the enclosing `def`
# is not shown in the snippet, and the paste flattened the indentation.
self.llm = ChatOpenAI(model='gpt-4o', temperature=0)
self.mini = ChatOpenAI(model='gpt-4o-mini', temperature=0)
# NOTE(review): pointing ChatOpenAI at the Perplexity endpoint is the
# likely trigger of the reported error -- the traceback shows
# `response.model_dump()` failing on a plain `str` inside
# langchain_openai's `_create_chat_result`, i.e. the backend did not
# return the OpenAI-shaped pydantic object the client expects. Confirm
# against the Perplexity API's OpenAI-compatibility guarantees.
self.pplx = ChatOpenAI(base_url="https://api.perplexity.ai",
model='llama-3.1-sonar-huge-128k-online')
# o1-preview only supports temperature=1, hence the different setting.
self.o1 = ChatOpenAI(model='o1-preview', temperature=1)
# Fan out seven sub-chains in parallel (the dict literal becomes a
# RunnableParallel), then format their text outputs into one prompt
# and send it to the o1 model; StrOutputParser yields a plain str.
# NOTE(review): indentation was flattened by the forum paste; the code
# is kept verbatim here.
chain = (
{
"chain1": chain1,
"chain2": chain2,
"chain3": chain3,
"chain4": chain4,
"chain5": chain5,
"chain6": chain6,
"chain7": chain7,
# itemgetter pulls "company" straight through from the invoke() input.
"company": itemgetter("company")
}
| PromptTemplate.from_template(
"""
{chain1}
{chain2}
{chain3}
{chain4}
{chain5}
{chain6}
{chain7}
"""
) | self.o1 | StrOutputParser()
)
# `symbol` is defined outside this fragment -- presumably a method
# parameter holding the company identifier; confirm against the caller.
return chain.invoke({"company": symbol})
Traceback:
AttributeError("'str' object has no attribute 'model_dump'")Traceback (most recent call last):
File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/site-packages/langchain_core/runnables/base.py", line 3022, in invoke
input = context.run(step.invoke, input, config, **kwargs)
File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/site-packages/langchain_core/runnables/base.py", line 3727, in invoke
output = {key: future.result() for key, future in zip(steps, futures)}
File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/site-packages/langchain_core/runnables/base.py", line 3727, in <dictcomp>
output = {key: future.result() for key, future in zip(steps, futures)}
File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/concurrent/futures/_base.py", line 439, in result
return self.__get_result()
File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/concurrent/futures/_base.py", line 391, in __get_result
raise self._exception
File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/concurrent/futures/thread.py", line 58, in run
result = self.fn(*self.args, **self.kwargs)
File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/site-packages/langchain_core/runnables/base.py", line 3711, in _invoke_step
return context.run(
File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/site-packages/langchain_core/runnables/base.py", line 3022, in invoke
input = context.run(step.invoke, input, config, **kwargs)
File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/site-packages/langchain_core/runnables/base.py", line 3727, in invoke
output = {key: future.result() for key, future in zip(steps, futures)}
File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/site-packages/langchain_core/runnables/base.py", line 3727, in <dictcomp>
output = {key: future.result() for key, future in zip(steps, futures)}
File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/concurrent/futures/_base.py", line 446, in result
return self.__get_result()
File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/concurrent/futures/_base.py", line 391, in __get_result
raise self._exception
File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/concurrent/futures/thread.py", line 58, in run
result = self.fn(*self.args, **self.kwargs)
File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/site-packages/langchain_core/runnables/base.py", line 3711, in _invoke_step
return context.run(
File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/site-packages/langchain_core/runnables/base.py", line 3024, in invoke
input = context.run(step.invoke, input, config)
File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/site-packages/langchain_core/language_models/chat_models.py", line 286, in invoke
self.generate_prompt(
File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/site-packages/langchain_core/language_models/chat_models.py", line 786, in generate_prompt
return self.generate(prompt_messages, stop=stop, callbacks=callbacks, **kwargs)
File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/site-packages/langchain_core/language_models/chat_models.py", line 643, in generate
raise e
File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/site-packages/langchain_core/language_models/chat_models.py", line 633, in generate
self._generate_with_cache(
File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/site-packages/langchain_core/language_models/chat_models.py", line 851, in _generate_with_cache
result = self._generate(
File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/site-packages/langchain_openai/chat_models/base.py", line 718, in _generate
return self._create_chat_result(response, generation_info)
File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/site-packages/langchain_openai/chat_models/base.py", line 745, in _create_chat_result
response if isinstance(response, dict) else response.model_dump()
AttributeError: 'str' object has no attribute 'model_dump'
Umgebungsinformationen:

- Python 3

| Paket                    | Version |
|--------------------------|---------|
| langchain                | 0.3.13  |
| langchain-community      | 0.3.13  |
| langchain-core           | 0.3.28  |
| langchain-openai         | 0.2.14  |
| langchain-text-splitters | 0.3.4   |
| langgraph                | 0.2.59  |
| langgraph-checkpoint     | 2.0.9   |
| langgraph-sdk            | 0.1.44  |
| langsmith                | 0.2.3   |
| openai                   | 1.58.1  |
| pydantic                 | 2.10.4  |
| pydantic_core            | 2.27.2  |
| pydantic-settings        | 2.7.0   |
Versuchter Fix:
Ich habe die Parser-Funktion aktualisiert, um str-Objekte zu verarbeiten, aber das Problem bleibt bestehen:
def wrap_chain_output(chain_output):
    """Normalize a child chain's output to a plain string.

    Chat models return message objects whose text lives in ``.content``,
    while string-producing runnables (e.g. after ``StrOutputParser``)
    already yield ``str``.  Downstream ``PromptTemplate`` formatting
    needs plain text, so the message content is extracted here.

    Args:
        chain_output: Either a ``str`` or any message-like object
            exposing a ``content`` attribute (``AIMessage``,
            ``AIMessageChunk``, ``ChatMessage``, ...).

    Returns:
        The textual content of the chain output.

    Raises:
        TypeError: If the value is neither a string nor content-bearing.
    """
    if isinstance(chain_output, str):
        return chain_output
    # Duck-type on `.content` instead of `isinstance(x, AIMessage)`:
    # the original referenced AIMessage without importing it (NameError
    # on this path), and duck typing also accepts the other LangChain
    # message classes, which all carry their text in `.content`.
    content = getattr(chain_output, "content", None)
    if content is not None:
        return content
    raise TypeError(
        f"Unsupported type for chain_output: {type(chain_output).__name__}"
    )
Beispiel für eine Teilkette:
# Sub-chain: ask the Perplexity-backed model for industry research on
# {company}.  NOTE(review): fragment from inside an unseen method; the
# paste flattened the indentation, code kept verbatim.
chain216 = (
PromptTemplate.from_template(
"""
**Search for the industry corresponding to the company **{company}** and then conduct research.**
Engage in divergent thinking, including but not limited to the following keywords.
"""
)
| self.pplx
)
# Run six sibling sub-chains in parallel (dict -> RunnableParallel),
# coerce each result to plain text via wrap_chain_output, then merge
# everything into one prompt for the gpt-4o client.
chain2 = (
{
"chain211": chain211 | (lambda x: wrap_chain_output(x)),
"chain212": chain212 | (lambda x: wrap_chain_output(x)),
"chain213": chain213 | (lambda x: wrap_chain_output(x)),
"chain214": chain214 | (lambda x: wrap_chain_output(x)),
"chain215": chain215 | (lambda x: wrap_chain_output(x)),
"chain216": chain216 | (lambda x: wrap_chain_output(x)),
# Pass the original "company" input through unchanged.
"company": itemgetter("company")
}
| PromptTemplate.from_template(
"""
## {company}
{chain211}
{chain212}
{chain213}
{chain214}
{chain215}
{chain216}
"""
)
| self.llm
)
LangSmith-Fehler-Trace-Info: LG-Bugs.jpg

Fehlerinformationen:
AttributeError("'str' object has no attribute 'model_dump'")Traceback (most recent call last):
File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/site-packages/langchain_core/language_models/chat_models.py", line 633, in generate
self._generate_with_cache(
File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/site-packages/langchain_core/language_models/chat_models.py", line 851, in _generate_with_cache
result = self._generate(
File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/site-packages/langchain_openai/chat_models/base.py", line 718, in _generate
return self._create_chat_result(response, generation_info)
File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/site-packages/langchain_openai/chat_models/base.py", line 745, in _create_chat_result
response if isinstance(response, dict) else response.model_dump()
AttributeError: 'str' object has no attribute 'model_dump'