TypeError: Llama.create_chat_completion() missing 1 required positional argument: 'self'
Opened this issue · 1 comments
MimiCheng commented
I'm following the 05-local-execution.ipynb
tutorial and got this error while executing the dynamic-routing cell: out = rl("what's the time in Rome right now?").
Any ideas how to solve this?
TypeError Traceback (most recent call last)
Cell In[15], line 1
----> 1 out = rl("what's the time in Rome right now?")
2 print(out)
3 # get_time(**out.function_call)
File /opt/conda/envs/nemo/lib/python3.12/site-packages/semantic_router/layer.py:262, in RouteLayer.__call__(self, text, vector, simulate_static)
260 else:
261 route.llm = self.llm
--> 262 return route(text)
263 elif passed and route is not None and simulate_static:
264 return RouteChoice(
265 name=route.name,
266 function_call=None,
267 similarity_score=None,
268 )
File /opt/conda/envs/nemo/lib/python3.12/site-packages/semantic_router/route.py:70, in Route.__call__(self, query)
65 raise ValueError(
66 "Query is required for dynamic routes. Please ensure the `query` "
67 "argument is passed."
68 )
69 # if a function schema is provided we generate the inputs
---> 70 extracted_inputs = self.llm.extract_function_inputs(
...
62 )
63 assert isinstance(completion, dict) # keep mypy happy
64 output = completion["choices"][0]["message"]["content"]
TypeError: Llama.create_chat_completion() missing 1 required positional argument: 'self'