johnbridges committed on
Commit
7255aeb
·
1 Parent(s): 5cd620c

fixed passing parameters to runner

Browse files
Files changed (1) hide show
  1. service.py +11 -7
service.py CHANGED
@@ -39,12 +39,12 @@ class LLMService:
39
  # ---------------------------- helpers ----------------------------
40
 
41
  def _to_model(self, data: Any) -> LLMServiceObj:
42
- """Accepts LLMServiceObj or dict and returns a validated LLMServiceObj."""
43
- if isinstance(data, LLMServiceObj):
44
- return data
45
- if isinstance(data, dict):
46
- return LLMServiceObj(**data)
47
- raise TypeError("LLMService expects an object payload (dict/LLMServiceObj).")
48
 
49
  async def _emit_result(
50
  self,
@@ -115,7 +115,11 @@ class LLMService:
115
  pass
116
 
117
  # Create runner from factory (pass a plain dict for decoupling)
118
- runner = await self._runner_factory(llm.model_dump(by_alias=True))
 
 
 
 
119
  if not runner.IsEnabled:
120
  await self._emit_result(
121
  llm,
 
39
  # ---------------------------- helpers ----------------------------
40
 
41
  def _to_model(self, data: Any) -> LLMServiceObj:
42
+ # Defensive: ensure required nested objects are dicts, not None
43
+ if data.get("FunctionCallData") is None:
44
+ data["FunctionCallData"] = {}
45
+ if data.get("UserInfo") is None:
46
+ data["UserInfo"] = {}
47
+ return LLMServiceObj(**data)
48
 
49
  async def _emit_result(
50
  self,
 
115
  pass
116
 
117
  # Create runner from factory (pass a plain dict for decoupling)
118
+ runner = await self._runner_factory({
119
+ **llm.model_dump(by_alias=True),
120
+ "_publisher": self._pub,
121
+ "_settings": settings,
122
+ })
123
  if not runner.IsEnabled:
124
  await self._emit_result(
125
  llm,