Commit c476a18
1 Parent(s): 0624b2b

- models.py +2 -2
- rabbit_repo.py +5 -3
- service.py +3 -3
models.py CHANGED

@@ -77,7 +77,6 @@ class UserInfo(BaseModel):
     # JsonIgnore MonitorIPs in C#: we do not include it here
 
 
-
 class LLMServiceObj(BaseModel):
     # allow using field names or aliases
     model_config = ConfigDict(populate_by_name=True)
@@ -128,7 +127,8 @@ class LLMServiceObj(BaseModel):
     IsFunctionCallError: bool = False
     IsFunctionCallStatus: bool = False
     IsFunctionStillRunning: bool = False
-
+
+
 class ResultObj(BaseModel):
     Message: str = ""
     Success: bool = False
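The populate_by_name=True config on LLMServiceObj is what the by_alias=True dumps elsewhere in this commit rely on: a field can be populated by its Python name or by its alias, and dumped back out under the alias. A minimal sketch of that round trip, using a hypothetical model (the field and alias below are illustrative, not taken from models.py):

    from pydantic import BaseModel, ConfigDict, Field

    class Probe(BaseModel):
        # Hypothetical model, not from models.py: PascalCase alias for C# interop
        model_config = ConfigDict(populate_by_name=True)
        message: str = Field(default="", alias="Message")

    Probe(message="hi")                 # accepted via the field name
    p = Probe(Message="hi")             # accepted via the alias
    print(p.model_dump())               # {'message': 'hi'}  (Python names)
    print(p.model_dump(by_alias=True))  # {'Message': 'hi'}  (alias-cased keys)

Without populate_by_name=True, only the alias would validate; without by_alias=True, consumers expecting the C#-style keys would see the Python names.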
rabbit_repo.py CHANGED

@@ -22,18 +22,20 @@ class RabbitRepo(RabbitBase):
 
     async def publish(self, exchange: str, obj: Any, routing_key: str = ""):
         ex = await self.ensure_exchange(exchange)
+        # rabbit_repo.py
         evt = CloudEvent.wrap(
             event_id=str(uuid.uuid4()),
             event_type=(obj.__class__.__name__ if obj is not None else "NullOrEmpty"),
             source=self._source,
-            data=obj if not hasattr(obj, "model_dump") else obj.model_dump(),
-        )
+            data=obj if not hasattr(obj, "model_dump") else obj.model_dump(by_alias=True),
+        )
         body = evt.model_dump_json(exclude_none=True).encode("utf-8")
+
         await ex.publish(aio_pika.Message(body=body), routing_key=routing_key)
 
     async def publish_jsonz(self, exchange: str, obj: Any, routing_key: str = "", with_id: Optional[str] = None) -> str:
         ex = await self.ensure_exchange(exchange)
-        datajson = to_json(obj if not hasattr(obj, "model_dump") else obj.model_dump())
+        datajson = to_json(obj if not hasattr(obj, "model_dump") else obj.model_dump(by_alias=True))
         datajsonZ = json_compress_str(datajson)
         payload: Any = (datajsonZ, with_id) if with_id else datajsonZ
 
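Both publishers share the same duck-typing guard: anything exposing a model_dump attribute is treated as a Pydantic model and dumped under its aliases; everything else (plain dicts, strings, None) passes through unchanged. Factored out as a hypothetical helper (not in the repo), the pattern is:

    from typing import Any

    def as_wire_data(obj: Any) -> Any:
        # Hypothetical helper mirroring the inline expression in publish()
        # and publish_jsonz(): Pydantic models are serialized under their
        # alias names; non-model payloads are forwarded as-is.
        return obj if not hasattr(obj, "model_dump") else obj.model_dump(by_alias=True)

One caveat of hasattr-based dispatch is that any object with a model_dump attribute gets dumped, whether or not it is actually a Pydantic model.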
service.py CHANGED

@@ -115,7 +115,7 @@ class LLMService:
             pass
 
         # Create runner from factory (pass a plain dict for decoupling)
-        runner = await self._runner_factory(llm.model_dump())
+        runner = await self._runner_factory(llm.model_dump(by_alias=True))
         if not runner.IsEnabled:
             await self._emit_result(
                 llm,
@@ -129,7 +129,7 @@
             llm, f"Starting {runner.Type} {settings.SERVICE_ID} Expert", True, "llmServiceMessage", check_system=True
         )
 
-        await runner.StartProcess(llm.model_dump())
+        await runner.StartProcess(llm.model_dump(by_alias=True))
 
         self._sessions[session_id] = _Session(Runner=runner, FullSessionId=session_id)
 
@@ -214,7 +214,7 @@
             return
 
         # Let runner push partials itself if desired; we still return a small ack
-        await r.SendInputAndGetResponse(llm.model_dump())
+        await r.SendInputAndGetResponse(llm.model_dump(by_alias=True))
 
     async def QueryIndexResult(self, payload: Any) -> None:
         """
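The "pass a plain dict for decoupling" comment describes the runner-factory contract this commit tightens: the service never hands Pydantic types across the boundary, only model_dump(by_alias=True) output, so runners always see alias-cased keys. A sketch of a runner honoring that contract (the class, the factory name, and the printed behavior are all hypothetical):

    from typing import Any, Dict

    class EchoRunner:
        # Hypothetical runner: receives only plain dicts, never models
        IsEnabled = True
        Type = "Echo"

        async def StartProcess(self, llm: Dict[str, Any]) -> None:
            # Keys arrive exactly as model_dump(by_alias=True) produced them,
            # e.g. "IsFunctionCallError" rather than a snake_case variant.
            print("starting with keys:", sorted(llm))

    async def runner_factory(llm: Dict[str, Any]) -> EchoRunner:
        # Mirrors self._runner_factory: dict in, runner out, so swapping
        # runner implementations never requires importing the service's models.
        return EchoRunner()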