You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
Error below — it seems like the dict config is ignoring the LLM and trying to use an LLM from LM Studio, even though I am only using embeddings from it.
Can see embeddings working in LM Studio
🖇 AgentOps: This run cost $0.000000
Traceback (most recent call last):
File "/Volumes/Home_Profile/tech/Developer/github/crewai/basic_example + Tool/git.py", line 164, in <module>
result = crew.kickoff()
^^^^^^^^^^^^^^
File "/Volumes/Home_Profile/tech/Developer/github/crewai/basic_example + Tool/.venv/lib/python3.11/site-packages/crewai/crew.py", line 252, in kickoff
result = self._run_sequential_process()
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Volumes/Home_Profile/tech/Developer/github/crewai/basic_example + Tool/.venv/lib/python3.11/site-packages/crewai/crew.py", line 293, in _run_sequential_process
output = task.execute(context=task_output)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Volumes/Home_Profile/tech/Developer/github/crewai/basic_example + Tool/.venv/lib/python3.11/site-packages/crewai/task.py", line 173, in execute
result = self._execute(
^^^^^^^^^^^^^^
File "/Volumes/Home_Profile/tech/Developer/github/crewai/basic_example + Tool/.venv/lib/python3.11/site-packages/crewai/task.py", line 182, in _execute
result = agent.execute_task(
^^^^^^^^^^^^^^^^^^^
File "/Volumes/Home_Profile/tech/Developer/github/crewai/basic_example + Tool/.venv/lib/python3.11/site-packages/crewai/agent.py", line 221, in execute_task
result = self.agent_executor.invoke(
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Volumes/Home_Profile/tech/Developer/github/crewai/basic_example + Tool/.venv/lib/python3.11/site-packages/langchain/chains/base.py", line 163, in invoke
raise e
File "/Volumes/Home_Profile/tech/Developer/github/crewai/basic_example + Tool/.venv/lib/python3.11/site-packages/langchain/chains/base.py", line 153, in invoke
self._call(inputs, run_manager=run_manager)
File "/Volumes/Home_Profile/tech/Developer/github/crewai/basic_example + Tool/.venv/lib/python3.11/site-packages/crewai/agents/executor.py", line 124, in _call
next_step_output = self._take_next_step(
^^^^^^^^^^^^^^^^^^^^^
File "/Volumes/Home_Profile/tech/Developer/github/crewai/basic_example + Tool/.venv/lib/python3.11/site-packages/langchain/agents/agent.py", line 1138, in _take_next_step
[
File "/Volumes/Home_Profile/tech/Developer/github/crewai/basic_example + Tool/.venv/lib/python3.11/site-packages/langchain/agents/agent.py", line 1138, in <listcomp>
[
File "/Volumes/Home_Profile/tech/Developer/github/crewai/basic_example + Tool/.venv/lib/python3.11/site-packages/crewai/agents/executor.py", line 186, in _iter_next_step
output = self.agent.plan(
^^^^^^^^^^^^^^^^
File "/Volumes/Home_Profile/tech/Developer/github/crewai/basic_example + Tool/.venv/lib/python3.11/site-packages/langchain/agents/agent.py", line 397, in plan
for chunk in self.runnable.stream(inputs, config={"callbacks": callbacks}):
File "/Volumes/Home_Profile/tech/Developer/github/crewai/basic_example + Tool/.venv/lib/python3.11/site-packages/langchain_core/runnables/base.py", line 2875, in stream
yield from self.transform(iter([input]), config, **kwargs)
File "/Volumes/Home_Profile/tech/Developer/github/crewai/basic_example + Tool/.venv/lib/python3.11/site-packages/langchain_core/runnables/base.py", line 2862, in transform
yield from self._transform_stream_with_config(
File "/Volumes/Home_Profile/tech/Developer/github/crewai/basic_example + Tool/.venv/lib/python3.11/site-packages/langchain_core/runnables/base.py", line 1881, in _transform_stream_with_config
chunk: Output = context.run(next, iterator) # type: ignore
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Volumes/Home_Profile/tech/Developer/github/crewai/basic_example + Tool/.venv/lib/python3.11/site-packages/langchain_core/runnables/base.py", line 2826, in _transform
for output in final_pipeline:
File "/Volumes/Home_Profile/tech/Developer/github/crewai/basic_example + Tool/.venv/lib/python3.11/site-packages/langchain_core/runnables/base.py", line 1282, in transform
for ichunk in input:
File "/Volumes/Home_Profile/tech/Developer/github/crewai/basic_example + Tool/.venv/lib/python3.11/site-packages/langchain_core/runnables/base.py", line 4736, in transform
yield from self.bound.transform(
File "/Volumes/Home_Profile/tech/Developer/github/crewai/basic_example + Tool/.venv/lib/python3.11/site-packages/langchain_core/runnables/base.py", line 1300, in transform
yield from self.stream(final, config, **kwargs)
File "/Volumes/Home_Profile/tech/Developer/github/crewai/basic_example + Tool/.venv/lib/python3.11/site-packages/langchain_core/language_models/chat_models.py", line 249, in stream
raise e
File "/Volumes/Home_Profile/tech/Developer/github/crewai/basic_example + Tool/.venv/lib/python3.11/site-packages/langchain_core/language_models/chat_models.py", line 229, in stream
for chunk in self._stream(messages, stop=stop, **kwargs):
File "/Volumes/Home_Profile/tech/Developer/github/crewai/basic_example + Tool/.venv/lib/python3.11/site-packages/langchain_openai/chat_models/base.py", line 408, in _stream
for chunk in self.client.create(messages=message_dicts, **params):
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Volumes/Home_Profile/tech/Developer/github/crewai/basic_example + Tool/.venv/lib/python3.11/site-packages/agentops/llm_tracker.py", line 208, in patched_function
result = original_create(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Volumes/Home_Profile/tech/Developer/github/crewai/basic_example + Tool/.venv/lib/python3.11/site-packages/openai/_utils/_utils.py", line 277, in wrapper
return func(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^
File "/Volumes/Home_Profile/tech/Developer/github/crewai/basic_example + Tool/.venv/lib/python3.11/site-packages/openai/resources/chat/completions.py", line 579, in create
return self._post(
^^^^^^^^^^^
File "/Volumes/Home_Profile/tech/Developer/github/crewai/basic_example + Tool/.venv/lib/python3.11/site-packages/openai/_base_client.py", line 1232, in post
return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Volumes/Home_Profile/tech/Developer/github/crewai/basic_example + Tool/.venv/lib/python3.11/site-packages/openai/_base_client.py", line 921, in request
return self._request(
^^^^^^^^^^^^^^
File "/Volumes/Home_Profile/tech/Developer/github/crewai/basic_example + Tool/.venv/lib/python3.11/site-packages/openai/_base_client.py", line 997, in _request
return self._retry_request(
^^^^^^^^^^^^^^^^^^^^
File "/Volumes/Home_Profile/tech/Developer/github/crewai/basic_example + Tool/.venv/lib/python3.11/site-packages/openai/_base_client.py", line 1045, in _retry_request
return self._request(
^^^^^^^^^^^^^^
File "/Volumes/Home_Profile/tech/Developer/github/crewai/basic_example + Tool/.venv/lib/python3.11/site-packages/openai/_base_client.py", line 997, in _request
return self._retry_request(
^^^^^^^^^^^^^^^^^^^^
File "/Volumes/Home_Profile/tech/Developer/github/crewai/basic_example + Tool/.venv/lib/python3.11/site-packages/openai/_base_client.py", line 1045, in _retry_request
return self._request(
^^^^^^^^^^^^^^
File "/Volumes/Home_Profile/tech/Developer/github/crewai/basic_example + Tool/.venv/lib/python3.11/site-packages/openai/_base_client.py", line 1012, in _request
raise self._make_status_error_from_response(err.response) from None
openai.InternalServerError: Error code: 500 - {'error': 'No models loaded. Please load a model in LM Studio first.'}
The text was updated successfully, but these errors were encountered:
Using the latest crewai 0.1.28 + crewai-tools.
Error below — it seems like the dict config is ignoring the LLM and trying to use an LLM from LM Studio, even though I am only using embeddings from it.
Can see embeddings working in LM Studio
The text was updated successfully, but these errors were encountered: