# models.py
from typing import Any, Optional, List, Dict
from datetime import datetime, timezone
from pydantic import BaseModel, Field, ConfigDict
# ---------- CloudEvent (Pydantic v2) ----------
class CloudEvent(BaseModel):
    specversion: str = "1.0"
    id: str
    type: str
    source: str
    time: datetime
    datacontenttype: str = "application/json"
    data: Optional[Any] = None

    @staticmethod
    def now_utc() -> datetime:
        return datetime.now(timezone.utc)

    @classmethod
    def wrap(cls, *, event_id: str, event_type: str, source: str, data: Any) -> "CloudEvent":
        return cls(
            id=event_id,
            type=event_type or ("NullOrEmpty" if data is None else type(data).__name__),
            source=source,
            time=cls.now_utc(),
            data=data,
        )
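# Illustrative usage sketch (not part of the original module; the ids and source
# below are made-up values): CloudEvent.wrap builds the envelope and, when
# event_type is empty, falls back to the payload's class name ("dict" here) or
# "NullOrEmpty" when data is None.
#
#   evt = CloudEvent.wrap(
#       event_id="evt-123",
#       event_type="",
#       source="gradllm/models",
#       data={"hello": "world"},
#   )
#   assert evt.type == "dict"
#   print(evt.model_dump_json())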
# ---------- FunctionCallData (from your C#) ----------
class FunctionCallData(BaseModel):
    function: Optional[str] = None
    name: Optional[str] = None
    arguments: Optional[Dict[str, Any]] = None
    parameters: Optional[Dict[str, Any]] = None
# ---------- FunctionState (from your C#) ----------
class FunctionState(BaseModel):
    IsFunctionCall: bool = False
    IsFunctionCallResponse: bool = False
    IsFunctionCallError: bool = False
    IsFunctionCallStatus: bool = False
    IsFunctionStillRunning: bool = False
# ---------- UserInfo (from your C#; omitting JsonIgnored/NotMapped collections) ----------
class UserInfo(BaseModel):
    UserID: Optional[str] = None
    DateCreated: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
    HostLimit: int = 0
    DisableEmail: bool = False
    Status: Optional[str] = ""
    Name: Optional[str] = ""
    Given_name: Optional[str] = ""
    Family_name: Optional[str] = ""
    Nickname: Optional[str] = ""
    Sub: Optional[str] = ""
    Enabled: bool = True
    MonitorAlertEnabled: bool = True
    PredictAlertEnabled: bool = False
    AccountType: Optional[str] = "Default"
    Email: Optional[str] = ""
    Email_verified: bool = False
    Picture: Optional[str] = ""
    Updated_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
    LastLoginDate: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
    CustomerId: Optional[str] = ""
    CancelAt: Optional[datetime] = None
    TokensUsed: int = 0
    # NotMapped in C#: modelled as a loose dict so it round-trips safely
    LoadServer: Dict[str, Any] = Field(default_factory=dict)
    # MonitorIPs is JsonIgnore'd in C#, so it is not included here
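# Illustrative note (assumed usage, made-up values): because LoadServer is a
# plain Dict[str, Any], any JSON object round-trips through it unchanged, e.g.
#   u = UserInfo(UserID="u-1", LoadServer={"region": "eu", "cpu": 0.42})
#   assert UserInfo.model_validate(u.model_dump()).LoadServer["cpu"] == 0.42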
class LLMServiceObj(BaseModel):
    # allow using field names or aliases
    model_config = ConfigDict(populate_by_name=True)

    # strings
    SessionId: str = ""
    JsonFunction: str = ""
    LlmMessage: str = ""
    ResultMessage: str = ""
    UserInput: str = ""
    RequestSessionId: str = ""
    FunctionName: str = ""
    TimeZone: str = ""
    LLMRunnerType: str = "TurboLLM"
    SourceLlm: str = ""
    DestinationLlm: str = ""
    MessageID: str = ""
    LlmSessionStartName: str = ""
    SwapFunctionName: str = ""
    ChatAgentLocation: str = ""
    ToolsDefinitionId: Optional[str] = None
    JsonToolsBuilderSpec: Optional[str] = None

    # ints / bools
    TokensUsed: int = 0
    IsUserLoggedIn: bool = False
    IsFuncAck: bool = False
    IsProcessed: bool = False
    IsSystemLlm: bool = False
    Timeout: Optional[int] = None

    # complex (rename attrs; keep JSON names via alias)
    FunctionCallId: str = ""
    function_call_data: FunctionCallData = Field(default_factory=FunctionCallData, alias="FunctionCallData")
    user_info: UserInfo = Field(default_factory=UserInfo, alias="UserInfo")
    StartTimeUTC: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))

    # stacks
    LlmStack: List[str] = Field(default_factory=list)
    FunctionCallIdStack: List[str] = Field(default_factory=list)
    FunctionNameStack: List[str] = Field(default_factory=list)
    IsProcessedStack: List[bool] = Field(default_factory=list)
    MessageIDStack: List[str] = Field(default_factory=list)

    # function state flags
    IsFunctionCall: bool = False
    IsFunctionCallResponse: bool = False
    IsFunctionCallError: bool = False
    IsFunctionCallStatus: bool = False
    IsFunctionStillRunning: bool = False
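# Illustrative alias sketch (assumed payload values, not real service traffic):
# with populate_by_name=True, LLMServiceObj accepts either the Python attribute
# names (function_call_data, user_info) or the C#-style JSON aliases
# ("FunctionCallData", "UserInfo"), and model_dump(by_alias=True) emits the
# C#-style names again, e.g.:
#
#   obj = LLMServiceObj.model_validate({
#       "SessionId": "abc",
#       "FunctionCallData": {"name": "get_weather", "arguments": {"city": "Oslo"}},
#       "UserInfo": {"UserID": "u-1"},
#   })
#   wire = obj.model_dump(by_alias=True)  # keys are "FunctionCallData" / "UserInfo"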
class ResultObj(BaseModel):
    Message: str = ""
    Success: bool = False
    Data: Optional[Any] = None
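# Minimal self-check sketch (added for illustration; the identifiers and values
# below are examples, not anything the real service uses). Running
# `python models.py` exercises the alias round trip and the CloudEvent envelope.
if __name__ == "__main__":
    svc = LLMServiceObj(
        SessionId="session-1",
        UserInput="hello",
        FunctionCallData=FunctionCallData(name="demo_fn", arguments={"x": 1}),
    )

    # Dump with the C#-style names (JSON mode turns datetimes into ISO strings),
    # then re-validate to confirm the aliases round-trip.
    wire = svc.model_dump(by_alias=True, mode="json")
    assert LLMServiceObj.model_validate(wire).function_call_data.name == "demo_fn"

    evt = CloudEvent.wrap(
        event_id="evt-1",
        event_type="llm.service.request",
        source="gradllm/models",
        data=wire,
    )
    print(evt.model_dump_json(indent=2))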