@property
def ai_cache_dir(self) -> Path:
    """Directory where cached embedding artifacts for the configured model live.

    Delegates resolution to the named CacheManager; its ``ai_embed_dir``
    method is responsible for the provider/model subdirectory layout,
    e.g. ``~/.tdoc-crawler/.ai/sentence-transformers/all-MiniLM-L6-v2``.
    """
    manager = resolve_cache_manager(self.cache_manager_name)
    return manager.ai_embed_dir(self.embedding_model)