Coverage for src/slide_stream/llm.py: 6%

70 statements  

coverage.py v7.9.1, created at 2025-06-24 15:45 +0800
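At 6%, only a handful of the 70 statements in this module are executed by the test suite. A minimal sketch of tests that would exercise the error-handling branches without real credentials, assuming the project is tested with pytest and pytest-cov (an assumption; the report itself only names coverage.py):

    # Hypothetical test module (tests/test_llm.py is an illustrative path, not taken from this report).
    # Suggested run: pytest --cov=slide_stream.llm --cov-report=term-missing
    import pytest

    from slide_stream.llm import get_llm_client


    def test_unknown_provider_raises():
        # Hits the final else branch of get_llm_client.
        with pytest.raises(ValueError, match="Unknown LLM provider"):
            get_llm_client("not-a-real-provider")


    def test_missing_openai_key(monkeypatch):
        # Hits the "environment variable not set" branch; assumes the optional
        # openai dependency is installed, otherwise ImportError is raised instead.
        monkeypatch.delenv("OPENAI_API_KEY", raising=False)
        with pytest.raises(ValueError, match="OPENAI_API_KEY"):
            get_llm_client("openai")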

"""LLM integration for Slide Stream."""

import os
from typing import Any

from rich.console import Console

err_console = Console(stderr=True, style="bold red")


def get_llm_client(provider: str) -> Any:
    """Get LLM client based on provider."""
    if provider == "gemini":
        try:
            import google.generativeai as genai

            api_key = os.getenv("GEMINI_API_KEY")
            if not api_key:
                raise ValueError("GEMINI_API_KEY environment variable not set.")
            genai.configure(api_key=api_key)
            return genai.GenerativeModel("gemini-1.5-flash")
        except ImportError:
            raise ImportError(
                "Gemini library not found. Please install with: pip install slide-stream[gemini]"
            )

    elif provider == "openai":
        try:
            from openai import OpenAI

            api_key = os.getenv("OPENAI_API_KEY")
            if not api_key:
                raise ValueError("OPENAI_API_KEY environment variable not set.")
            return OpenAI(api_key=api_key)
        except ImportError:
            raise ImportError(
                "OpenAI library not found. Please install with: pip install slide-stream[openai]"
            )

    elif provider == "claude":
        try:
            import anthropic

            api_key = os.getenv("ANTHROPIC_API_KEY")
            if not api_key:
                raise ValueError("ANTHROPIC_API_KEY environment variable not set.")
            return anthropic.Anthropic(api_key=api_key)
        except ImportError:
            raise ImportError(
                "Anthropic library not found. Please install with: pip install slide-stream[claude]"
            )

    elif provider == "groq":
        try:
            from groq import Groq

            api_key = os.getenv("GROQ_API_KEY")
            if not api_key:
                raise ValueError("GROQ_API_KEY environment variable not set.")
            return Groq(api_key=api_key)
        except ImportError:
            raise ImportError(
                "Groq library not found. Please install with: pip install slide-stream[groq]"
            )

    elif provider == "ollama":
        try:
            from openai import OpenAI

            base_url = os.getenv("OLLAMA_BASE_URL", "http://localhost:11434")
            return OpenAI(base_url=f"{base_url}/v1", api_key="ollama")
        except ImportError:
            raise ImportError(
                "OpenAI library not found. Please install with: pip install slide-stream[openai]"
            )

    else:
        raise ValueError(f"Unknown LLM provider: {provider}")
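A minimal usage sketch for the factory above; the provider name and key value are placeholders, and the call assumes the matching optional dependency is installed:

    # Hypothetical example: any of "gemini", "openai", "claude", "groq", or "ollama"
    # can be passed, provided its extra is installed and its API key is exported.
    import os

    from slide_stream.llm import get_llm_client

    os.environ.setdefault("OPENAI_API_KEY", "sk-placeholder")  # placeholder value
    client = get_llm_client("openai")  # returns an OpenAI client instance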

def query_llm(
    client: Any, provider: str, prompt_text: str, rich_console: Console
) -> str | None:
    """Query LLM with given prompt."""
    rich_console.print(" - Querying LLM...")

    try:
        if provider == "gemini":
            response = client.generate_content(prompt_text)
            return response.text

        elif provider in ["openai", "ollama"]:
            model = "gpt-4o" if provider == "openai" else "llama3.1"
            response = client.chat.completions.create(
                model=model, messages=[{"role": "user", "content": prompt_text}]
            )
            return response.choices[0].message.content

        elif provider == "claude":
            response = client.messages.create(
                model="claude-3-sonnet-20240229",
                max_tokens=1024,
                messages=[{"role": "user", "content": prompt_text}],
            )
            return response.content[0].text

        elif provider == "groq":
            response = client.chat.completions.create(
                model="llama3-8b-8192",
                messages=[{"role": "user", "content": prompt_text}],
            )
            return response.choices[0].message.content

        return None

    except Exception as e:
        err_console.print(f" - LLM Error: {e}")
        return None
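
A short end-to-end sketch combining both functions; the provider, credentials, and prompt are illustrative only:

    # Hypothetical end-to-end call: query_llm returns the generated text,
    # or None if the provider is unrecognized or the request fails.
    from rich.console import Console

    from slide_stream.llm import get_llm_client, query_llm

    console = Console()
    client = get_llm_client("groq")  # assumes GROQ_API_KEY is set and groq is installed
    narration = query_llm(client, "groq", "Summarize this slide in one sentence.", console)
    if narration is None:
        console.print("LLM query failed; see stderr for details.")
    else:
        console.print(narration)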