Coverage for packages/core/src/langgate/core/models.py: 100%

73 statements  

coverage.py v7.7.1, created at 2025-04-09 21:23 +0000

1"""Core domain data models for LangGate.""" 

2 

3from datetime import UTC, datetime 

4from decimal import Decimal 

5from typing import Annotated, Any, NewType 

6 

7from pydantic import BaseModel, ConfigDict, Field, SecretStr 

8 

9# Type aliases for flexibility while maintaining naming compatibility 

10ServiceProviderId = NewType("ServiceProviderId", str) 

11# Model provider might differ from the inference service provider 

12# The service provider is not intended to be exposed to external consumers of the registry 

13# The service provider is used by the proxy for routing requests to the correct service 

14ModelProviderId = NewType("ModelProviderId", str) 

15 

16# Common inference service providers for convenience 

17SERVICE_PROVIDER_OPENAI = ServiceProviderId("openai") 

18SERVICE_PROVIDER_ANTHROPIC = ServiceProviderId("anthropic") 

19SERVICE_PROVIDER_GEMINI = ServiceProviderId("gemini") 

20SERVICE_PROVIDER_VERTEX = ServiceProviderId("vertex") 

21SERVICE_PROVIDER_DEEPSEEK = ServiceProviderId("deepseek") 

22SERVICE_PROVIDER_ALIBABA = ServiceProviderId("alibaba") 

23SERVICE_PROVIDER_XAI = ServiceProviderId("xai") 

24SERVICE_PROVIDER_ELEUTHERIA_VLLM = ServiceProviderId("eleutheria/vllm") 

25SERVICE_PROVIDER_GROQ = ServiceProviderId("groq") 

26SERVICE_PROVIDER_OPENROUTER = ServiceProviderId("openrouter") 

27SERVICE_PROVIDER_FIREWORKS = ServiceProviderId("fireworks_ai") 

28SERVICE_PROVIDER_HUGGINGFACE = ServiceProviderId("huggingface") 

29SERVICE_PROVIDER_COHERE = ServiceProviderId("cohere") 

30SERVICE_PROVIDER_BEDROCK = ServiceProviderId("bedrock") 

31SERVICE_PROVIDER_AZURE_OPENAI = ServiceProviderId("azure_openai") 

32 

33# Common model providers for convenience 

34MODEL_PROVIDER_OPENAI = ModelProviderId("openai") 

35MODEL_PROVIDER_ANTHROPIC = ModelProviderId("anthropic") 

36MODEL_PROVIDER_META = ModelProviderId("meta") 

37MODEL_PROVIDER_GOOGLE = ModelProviderId("google") 

38MODEL_PROVIDER_DEEPSEEK = ModelProviderId("deepseek") 

39MODEL_PROVIDER_ALIBABA = ModelProviderId("alibaba") 

40MODEL_PROVIDER_XAI = ModelProviderId("xai") 

41MODEL_PROVIDER_COHERE = ModelProviderId("cohere") 

42MODEL_PROVIDER_ELEUTHERIA = ModelProviderId("eleutheria") 

43 

44 

45class ServiceProvider(BaseModel): 

46 """Information about a service provider (API service).""" 

47 

48 id: ServiceProviderId 

49 base_url: str 

50 api_key: SecretStr 

51 default_params: dict[str, Any] = Field(default_factory=dict) 

52 

53 

54class ModelProvider(BaseModel): 

55 """Information about a model provider (creator).""" 

56 

57 id: ModelProviderId 

58 name: str 

59 description: str | None = None 

60 

61 

62class ContextWindow(BaseModel): 

63 """Context window information for a model.""" 

64 

65 max_input_tokens: int = 0 

66 max_output_tokens: int = 0 

67 

68 model_config = ConfigDict(extra="allow") 

69 

70 

71class ModelCapabilities(BaseModel): 

72 """Capabilities of a language model.""" 

73 

74 supports_tools: bool | None = None 

75 supports_parallel_tool_calls: bool | None = None 

76 supports_vision: bool | None = None 

77 supports_audio_input: bool | None = None 

78 supports_audio_output: bool | None = None 

79 supports_prompt_caching: bool | None = None 

80 supports_response_schema: bool | None = None 

81 supports_system_messages: bool | None = None 

82 

83 model_config = ConfigDict(extra="allow") 

84 

85 

86TokenCost = Annotated[Decimal, "TokenCost"] 

87Percentage = Annotated[Decimal, "Percentage"] 

88TokenUsage = Annotated[Decimal, "TokenUsage"] 

89 

90 

91class ModelCost(BaseModel): 

92 """Cost information for a language model.""" 

93 

94 input_cost_per_token: TokenCost = Field(default_factory=lambda: Decimal()) 

95 output_cost_per_token: TokenCost = Field(default_factory=lambda: Decimal()) 

96 input_cost_per_token_batches: TokenCost | None = None 

97 output_cost_per_token_batches: TokenCost | None = None 

98 cache_read_input_token_cost: TokenCost | None = None 

99 

100 model_config = ConfigDict(extra="allow") 

101 

102 

103class LLMInfo(BaseModel): 

104 """Information about a language model.""" 

105 

106 id: str # "gpt-4o" 

107 name: str 

108 provider: ModelProvider # Who created it (shown to users) 

109 description: str | None = None 

110 costs: ModelCost = Field(default_factory=ModelCost) 

111 capabilities: ModelCapabilities = Field(default_factory=ModelCapabilities) 

112 context_window: ContextWindow = Field(default_factory=ContextWindow) 

113 updated_dt: datetime = Field(default_factory=lambda: datetime.now(UTC)) 

114 

115 model_config = ConfigDict(extra="allow")
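
The comments on ModelProviderId above distinguish the model's creator from the inference service that hosts it. A minimal sketch of that split, run against the definitions above (the endpoint, key, and model id are illustrative placeholders, not registry data):

# Hypothetical example: a Meta-created model served through Groq.
groq_service = ServiceProvider(
    id=SERVICE_PROVIDER_GROQ,
    base_url="https://api.groq.com/openai/v1",  # assumed endpoint, for illustration only
    api_key=SecretStr("sk-placeholder"),
)

meta = ModelProvider(id=MODEL_PROVIDER_META, name="Meta")

# LLMInfo carries the model provider (shown to users); the service provider
# stays internal and is only used by the proxy for routing.
llama = LLMInfo(id="llama-3.3-70b", name="Llama 3.3 70B", provider=meta)

Because api_key is a SecretStr, str(groq_service.api_key) renders as "**********"; the raw key is only reachable via get_secret_value().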
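
ModelCost stores per-token prices as Decimal, so request costs can be accumulated without floating-point drift. A sketch with made-up prices:

# Made-up per-token prices, purely for illustration.
costs = ModelCost(
    input_cost_per_token=Decimal("0.0000025"),
    output_cost_per_token=Decimal("0.00001"),
)

prompt_tokens = Decimal(1200)
completion_tokens = Decimal(350)

total = (
    prompt_tokens * costs.input_cost_per_token
    + completion_tokens * costs.output_cost_per_token
)
# 1200 * 0.0000025 + 350 * 0.00001 = 0.003 + 0.0035
print(total)  # 0.0065000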
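
ContextWindow, ModelCapabilities, ModelCost, and LLMInfo all set extra="allow", so unknown keys from upstream metadata are preserved rather than rejected, and the default factories on LLMInfo mean a record can be built from just an id, name, and provider. A sketch with placeholder values (the context-window numbers are illustrative, not authoritative):

info = LLMInfo(
    id="gpt-4o",
    name="GPT-4o",
    provider=ModelProvider(id=MODEL_PROVIDER_OPENAI, name="OpenAI"),
    context_window=ContextWindow(max_input_tokens=128_000, max_output_tokens=16_384),
)

print(info.costs.input_cost_per_token)  # 0 (Decimal), from the default factory
print(info.updated_dt.tzinfo)           # UTC, stamped at construction time

# extra="allow" keeps undeclared fields; supports_reasoning is a
# hypothetical key used only for illustration.
caps = ModelCapabilities(supports_tools=True, supports_reasoning=True)
print(caps.model_extra)  # {'supports_reasoning': True}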