Why am I seeing this error raised from a file inside the pinecone-client library when running my code? The chain runs properly on its own; the only problem seems to be its integration into the Streamlit app — why is that?
###############################################
AttributeError: 'NoneType' object has no attribute 'api_key'
Traceback:
File "/home/angelo/miniforge3/lib/python3.10/site-packages/streamlit/runtime/scriptrunner/script_runner.py", line 541, in _run_script
exec(code, module.__dict__)File "/home/angelo/Desktop/chatbot_3.py", line 135, in <module>
idx = pinecone.Index('pgdl')File "/home/angelo/miniforge3/lib/python3.10/site-packages/pinecone/index.py", line 57, in __init__
openapi_client_config.api_key = openapi_client_config.api_key or {}
############################################################
class StreamHandler(BaseCallbackHandler):
    """LangChain callback that streams LLM tokens into a Streamlit container.

    As each token arrives, the accumulated text is re-rendered via the
    container method named by ``display_method`` (e.g. ``markdown``/``write``).
    """

    def __init__(self, container, initial_text="", display_method="markdown"):
        # BUG FIX: the original defined ``init`` (not ``__init__``), so the
        # constructor was never invoked and none of these attributes existed.
        # Also replaced the curly quotes (a paste artifact) with ASCII quotes.
        self.container = container            # Streamlit placeholder, e.g. st.empty()
        self.text = initial_text              # accumulated response text so far
        self.display_method = display_method  # container method used to render

    def on_llm_new_token(self, token: str, **kwargs) -> None:
        """Append the new token and re-render the full text in the container.

        Raises:
            ValueError: if the container has no method named
                ``self.display_method``.
        """
        self.text += token
        display_function = getattr(self.container, self.display_method, None)
        if display_function is not None:
            display_function(self.text)
        else:
            raise ValueError(f"Invalid display_method: {self.display_method}")
def run_chatbot_app():
    """Render the Streamlit chatbot UI backed by a Pinecone hybrid retriever.

    NOTE(review): ``embed`` and ``sparse_encoder`` are referenced but not
    defined in this chunk — they must exist at module level before this runs.
    """
    # Initialize Pinecone BEFORE constructing the Index. If pinecone.Index()
    # is reached without a prior pinecone.init() in the same script run (easy
    # under Streamlit, which re-executes the whole script on every
    # interaction), the client config is None and Index.__init__ raises
    # "AttributeError: 'NoneType' object has no attribute 'api_key'" — the
    # exact traceback shown above. Curly quotes and the missing opening quote
    # on the environment string (paste artifacts) are also fixed here.
    pinecone.init(api_key="xxxxxxxxxxxxxxxxxxxxxxx", environment="gcp-free")
    OPENAI_API_KEY = "xxxxxxxxxxxxxxxxxxxxxxx"
    idx = pinecone.Index("pgdl")

    # Hybrid (dense + sparse) retriever over the Pinecone index.
    hs_retriever = PineconeHybridSearchRetriever(
        embeddings=embed, sparse_encoder=sparse_encoder, index=idx, top_k=1, alpha=0.5
    )

    # Conversational memory: keep the last 5 exchanges as message objects.
    conv_mem = ConversationBufferWindowMemory(
        memory_key='chat_history',
        k=5,
        return_messages=True,
    )

    # Prompt template. BUG FIX: the original opened the string with a single
    # pair of double quotes, leaving the {context}/{human_input} lines outside
    # the literal; it must be one triple-quoted string ending at Chatbot:""".
    template = """(...)
{context}
Human: {human_input}
Chatbot:"""
    prompt = PromptTemplate(
        input_variables=["human_input", "context"],
        template=template,
    )

    # --- Streamlit UI ---
    st.title('ChatPGDL')
    # BUG FIX: the label string was unterminated ('Make a question).
    query = st.text_input('Make a question')
    if st.button('Buscar'):
        st.subheader('Resposta:')
        # Empty placeholder that the StreamHandler fills token by token.
        chat_box = st.empty()
        stream_handler = StreamHandler(chat_box, display_method='write')
        llm = OpenAI(
            temperature=0,
            openai_api_key=OPENAI_API_KEY,
            model_name="gpt-3.5-turbo",
            callbacks=[stream_handler],
            streaming=True,
        )
        # Retrieval chain that condenses follow-up questions with `prompt`.
        chain = ConversationalRetrievalChain.from_llm(
            llm=llm,
            retriever=hs_retriever,
            condense_question_prompt=prompt,
            memory=conv_mem,
        )
        # The handler streams the answer into chat_box as it is generated;
        # the final text is also available in the chain's output dict.
        output = chain({"question": query})
        response = output['answer']


run_chatbot_app()