Coverage for src/slide_stream/llm.py: 6%
70 statements
« prev ^ index » next coverage.py v7.9.1, created at 2025-06-24 15:45 +0800
1"""LLM integration for Slide Stream."""
3import os
4from typing import Any
6from rich.console import Console
# Shared console for error reporting: writes to stderr, styled bold red.
err_console = Console(stderr=True, style="bold red")
def get_llm_client(provider: str) -> Any:
    """Build and return a client object for the requested LLM provider.

    Supported providers: ``"gemini"``, ``"openai"``, ``"claude"``,
    ``"groq"``, and ``"ollama"`` (served through the OpenAI-compatible API).

    Args:
        provider: Name of the LLM provider.

    Returns:
        A provider-specific client instance, ready to pass to ``query_llm``.

    Raises:
        ImportError: If the provider's optional dependency is not installed;
            chained (``from err``) to the original import failure so the
            real cause is preserved in tracebacks.
        ValueError: If the provider's API-key environment variable is unset,
            or the provider name is unknown.
    """
    if provider == "gemini":
        # Optional dependency: installed via the "gemini" extra.
        try:
            import google.generativeai as genai
        except ImportError as err:
            raise ImportError(
                "Gemini library not found. Please install with: pip install slide-stream[gemini]"
            ) from err
        api_key = os.getenv("GEMINI_API_KEY")
        if not api_key:
            raise ValueError("GEMINI_API_KEY environment variable not set.")
        genai.configure(api_key=api_key)
        return genai.GenerativeModel("gemini-1.5-flash")

    elif provider == "openai":
        try:
            from openai import OpenAI
        except ImportError as err:
            raise ImportError(
                "OpenAI library not found. Please install with: pip install slide-stream[openai]"
            ) from err
        api_key = os.getenv("OPENAI_API_KEY")
        if not api_key:
            raise ValueError("OPENAI_API_KEY environment variable not set.")
        return OpenAI(api_key=api_key)

    elif provider == "claude":
        try:
            import anthropic
        except ImportError as err:
            raise ImportError(
                "Anthropic library not found. Please install with: pip install slide-stream[claude]"
            ) from err
        api_key = os.getenv("ANTHROPIC_API_KEY")
        if not api_key:
            raise ValueError("ANTHROPIC_API_KEY environment variable not set.")
        return anthropic.Anthropic(api_key=api_key)

    elif provider == "groq":
        try:
            from groq import Groq
        except ImportError as err:
            raise ImportError(
                "Groq library not found. Please install with: pip install slide-stream[groq]"
            ) from err
        api_key = os.getenv("GROQ_API_KEY")
        if not api_key:
            raise ValueError("GROQ_API_KEY environment variable not set.")
        return Groq(api_key=api_key)

    elif provider == "ollama":
        # Ollama exposes an OpenAI-compatible endpoint; no real API key is
        # required, but the OpenAI client insists on a non-empty value.
        try:
            from openai import OpenAI
        except ImportError as err:
            raise ImportError(
                "OpenAI library not found. Please install with: pip install slide-stream[openai]"
            ) from err
        base_url = os.getenv("OLLAMA_BASE_URL", "http://localhost:11434")
        return OpenAI(base_url=f"{base_url}/v1", api_key="ollama")

    else:
        raise ValueError(f"Unknown LLM provider: {provider}")
def query_llm(
    client: Any, provider: str, prompt_text: str, rich_console: Console
) -> str | None:
    """Send *prompt_text* to the provider's client and return the reply text.

    Args:
        client: Client object produced by ``get_llm_client``.
        provider: Provider name, used to pick the request shape and model.
        prompt_text: The user prompt to submit.
        rich_console: Console used for progress output.

    Returns:
        The model's text response, or ``None`` if the provider is not
        recognized or the request raised (the error is reported on the
        module-level stderr console).
    """
    rich_console.print(" - Querying LLM...")

    # All chat-style providers share the same single-message payload.
    chat_messages = [{"role": "user", "content": prompt_text}]

    try:
        if provider == "gemini":
            return client.generate_content(prompt_text).text

        if provider in ("openai", "ollama"):
            chosen_model = "gpt-4o" if provider == "openai" else "llama3.1"
            completion = client.chat.completions.create(
                model=chosen_model, messages=chat_messages
            )
            return completion.choices[0].message.content

        if provider == "claude":
            reply = client.messages.create(
                model="claude-3-sonnet-20240229",
                max_tokens=1024,
                messages=chat_messages,
            )
            return reply.content[0].text

        if provider == "groq":
            completion = client.chat.completions.create(
                model="llama3-8b-8192",
                messages=chat_messages,
            )
            return completion.choices[0].message.content

        # Unrecognized provider: silently yield no result.
        return None
    except Exception as e:
        # Best-effort by design: report the failure and let the caller
        # fall back on a None response.
        err_console.print(f" - LLM Error: {e}")
        return None