Hi guys, I'm trying to run a TruLens evaluation with a Redis connection. My code:
# Imports assumed from the legacy llama_index / trulens_eval layout shown in the traceback below.
import os
from llama_index import VectorStoreIndex
from llama_index.llms import AzureOpenAI
from llama_index.embeddings import AzureOpenAIEmbedding
from llama_index.vector_stores import RedisVectorStore
from trulens_eval import Tru

# COMPLETIONS_MODEL and local_url are defined earlier in the script.
llm = AzureOpenAI(
    api_key=os.getenv("OPENAI_API_KEY"),
    api_version=os.getenv("OPENAI_API_VERSION"),
    azure_endpoint=os.getenv("OPENAI_API_BASE_llama"),
    engine=COMPLETIONS_MODEL,
    model=COMPLETIONS_MODEL,
    temperature=0,
)
embed_model = AzureOpenAIEmbedding(
    api_base=os.getenv("OPENAI_API_BASE_llama"),
    api_key=os.getenv("OPENAI_API_KEY"),
    api_version=os.getenv("OPENAI_API_VERSION"),
    azure_endpoint=os.getenv("OPENAI_API_BASE_llama"),
)

tru = Tru()

vector_store = RedisVectorStore(index_name="test_process", redis_url=local_url)
index = VectorStoreIndex.from_vector_store(vector_store=vector_store)
query_engine = index.as_query_engine()

query = "How to replace Blanket?"
answer = query_engine.query(query)
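(For completeness, the evaluation step I'm ultimately aiming for looks roughly like the sketch below; it never gets that far because the query itself fails first. This assumes trulens_eval's TruLlama wrapper, and the app_id is just a placeholder.)

from trulens_eval import TruLlama

# Sketch: wrap the query engine so Tru() records its calls for evaluation.
tru_recorder = TruLlama(query_engine, app_id="redis_eval_app")
with tru_recorder as recording:
    query_engine.query("How to replace Blanket?")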
I get an error when the query engine tries to answer the question:
File "c:\Users\yaakobi\OneDrive - HP Inc\Git\Chatbot_POC_V2-main\docs_processing\eval_trulens_process.py", line 72, in <module>
answer = query_engine.query(query)
File "C:\Users\yaakobi\OneDrive - HP Inc\Git\Chatbot_POC_V2-main\venv_py10\lib\site-packages\llama_index\core\base_query_engine.py", line 30, in query
return self._query(str_or_query_bundle)
File "C:\Users\yaakobi\OneDrive - HP Inc\Git\Chatbot_POC_V2-main\venv_py10\lib\site-packages\llama_index\query_engine\retriever_query_engine.py", line 170, in _query
nodes = self.retrieve(query_bundle)
File "C:\Users\yaakobi\OneDrive - HP Inc\Git\Chatbot_POC_V2-main\venv_py10\lib\site-packages\llama_index\query_engine\retriever_query_engine.py", line 126, in retrieve
nodes = self._retriever.retrieve(query_bundle)
File "C:\Users\yaakobi\OneDrive - HP Inc\Git\Chatbot_POC_V2-main\venv_py10\lib\site-packages\llama_index\core\base_retriever.py", line 54, in retrieve
nodes = self._retrieve(query_bundle)
File "C:\Users\yaakobi\OneDrive - HP Inc\Git\Chatbot_POC_V2-main\venv_py10\lib\site-packages\llama_index\indices\vector_store\retrievers\retriever.py", line 84, in _retrieve
self._service_context.embed_model.get_agg_embedding_from_queries(
File "C:\Users\yaakobi\OneDrive - HP Inc\Git\Chatbot_POC_V2-main\venv_py10\lib\site-packages\llama_index\embeddings\base.py", line 142, in get_agg_embedding_from_queries
query_embeddings = [self.get_query_embedding(query) for query in queries]
File "C:\Users\yaakobi\OneDrive - HP Inc\Git\Chatbot_POC_V2-main\venv_py10\lib\site-packages\llama_index\embeddings\base.py", line 142, in <listcomp>
query_embeddings = [self.get_query_embedding(query) for query in queries]
File "C:\Users\yaakobi\OneDrive - HP Inc\Git\Chatbot_POC_V2-main\venv_py10\lib\site-packages\llama_index\embeddings\base.py", line 111, in get_query_embedding
query_embedding = self._get_query_embedding(query)
File "C:\Users\yaakobi\OneDrive - HP Inc\Git\Chatbot_POC_V2-main\venv_py10\lib\site-packages\llama_index\embeddings\openai.py", line 340, in _get_query_embedding
return get_embedding(
File "C:\Users\yaakobi\OneDrive - HP Inc\Git\Chatbot_POC_V2-main\venv_py10\lib\site-packages\tenacity\__init__.py", line 289, in wrapped_f
return self(f, *args, **kw)
File "C:\Users\yaakobi\OneDrive - HP Inc\Git\Chatbot_POC_V2-main\venv_py10\lib\site-packages\tenacity\__init__.py", line 379, in __call__
do = self.iter(retry_state=retry_state)
File "C:\Users\yaakobi\OneDrive - HP Inc\Git\Chatbot_POC_V2-main\venv_py10\lib\site-packages\tenacity\__init__.py", line 325, in iter
raise retry_exc.reraise()
File "C:\Users\yaakobi\OneDrive - HP Inc\Git\Chatbot_POC_V2-main\venv_py10\lib\site-packages\tenacity\__init__.py", line 158, in reraise
raise self.last_attempt.result()
File "C:\Users\yaakobi\AppData\Local\Programs\Python\Python310\lib\concurrent\futures\_base.py", line 451, in result
return self.__get_result()
File "C:\Users\yaakobi\AppData\Local\Programs\Python\Python310\lib\concurrent\futures\_base.py", line 403, in __get_result
raise self._exception
File "C:\Users\yaakobi\OneDrive - HP Inc\Git\Chatbot_POC_V2-main\venv_py10\lib\site-packages\tenacity\__init__.py", line 382, in __call__
result = fn(*args, **kwargs)
File "C:\Users\yaakobi\OneDrive - HP Inc\Git\Chatbot_POC_V2-main\venv_py10\lib\site-packages\llama_index\embeddings\openai.py", line 119, in get_embedding
client.embeddings.create(input=[text], model=engine, **kwargs).data[0].embedding
File "C:\Users\yaakobi\OneDrive - HP Inc\Git\Chatbot_POC_V2-main\venv_py10\lib\site-packages\openai\resources\embeddings.py", line 103, in create
return self._post(
File "C:\Users\yaakobi\OneDrive - HP Inc\Git\Chatbot_POC_V2-main\venv_py10\lib\site-packages\openai\_base_client.py", line 1091, in post
return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls))
File "C:\Users\yaakobi\OneDrive - HP Inc\Git\Chatbot_POC_V2-main\venv_py10\lib\site-packages\openai\_base_client.py", line 852, in request
return self._request(
File "C:\Users\yaakobi\OneDrive - HP Inc\Git\Chatbot_POC_V2-main\venv_py10\lib\site-packages\openai\_base_client.py", line 933, in _request
raise self._make_status_error_from_response(err.response) from None
openai.AuthenticationError: Error code: 401 - {'error': {'message': 'Incorrect API key provided: 98f94b94********************61ee. You can find your API key at https://platform.openai.com/account/api-keys.', 'type': 'invalid_request_error', 'param': None, 'code': 'invalid_api_key'}}
I think it is hitting OpenAI instead of Azure. Note that when I run it as in the git example (without Redis), it works perfectly. Does anyone have any insight?
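For context, here is my guess at what might be missing: the Azure llm/embed_model are never attached to the index, so retrieval falls back to the default OpenAI embedding and uses the non-Azure key. This is an untested sketch, assuming the legacy ServiceContext API that matches the llama_index paths in the traceback above; I haven't confirmed it resolves the 401:

from llama_index import ServiceContext, VectorStoreIndex

# Attach the Azure models explicitly so the retriever's embedding call
# goes to the Azure endpoint instead of the default OpenAI client.
service_context = ServiceContext.from_defaults(llm=llm, embed_model=embed_model)
index = VectorStoreIndex.from_vector_store(
    vector_store=vector_store,
    service_context=service_context,
)
query_engine = index.as_query_engine()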
Comment From: madolson
openai.AuthenticationError: Error code: 401 - {'error': {'message': 'Incorrect API key provided: 98f94b94****61ee. You can find your API key at https://platform.openai.com/account/api-keys.', 'type': 'invalid_request_error', 'param': None, 'code': 'invalid_api_key'}}
This doesn't seem very Redis-related. I'm not sure what your root cause is, but we try to keep this tracker for Redis-related issues.