# Coverage report for src/funcall/funcall.py: 75% (153 statements)
# Generated by coverage.py v7.9.1 at 2025-06-28 01:17 +0900
1import asyncio
2import concurrent.futures
3import inspect
4import json
5from collections.abc import Callable
6from logging import getLogger
7from typing import Literal, Union, get_type_hints
9import litellm
10from openai.types.responses import (
11 FunctionToolParam,
12 ResponseFunctionToolCall,
13)
14from pydantic import BaseModel
16from funcall.decorators import ToolWrapper
17from funcall.types import ToolMeta, is_context_type
19from .metadata import generate_function_metadata
22def _convert_argument_type(value: list, hint: type) -> object:
23 """
24 Convert argument values to match expected types.
26 Args:
27 value: The value to convert
28 hint: The type hint to convert to
30 Returns:
31 Converted value
32 """
33 origin = getattr(hint, "__origin__", None)
34 result = value
35 if origin in (list, set, tuple):
36 args = getattr(hint, "__args__", [])
37 item_type = args[0] if args else str
38 result = [_convert_argument_type(v, item_type) for v in value]
39 elif origin is dict: 39 ↛ 40line 39 didn't jump to line 40 because the condition on line 39 was never true
40 result = value
41 elif getattr(hint, "__origin__", None) is Union:
42 args = getattr(hint, "__args__", [])
43 non_none_types = [a for a in args if a is not type(None)]
44 result = _convert_argument_type(value, non_none_types[0]) if len(non_none_types) == 1 else value
45 elif isinstance(hint, type) and BaseModel and issubclass(hint, BaseModel):
46 if isinstance(value, dict): 46 ↛ 51line 46 didn't jump to line 51 because the condition on line 46 was always true
47 fields = hint.model_fields
48 converted_data = {k: _convert_argument_type(v, fields[k].annotation) if k in fields else v for k, v in value.items()} # type: ignore
49 result = hint(**converted_data)
50 else:
51 result = value
52 elif hasattr(hint, "__dataclass_fields__"):
53 if isinstance(value, dict): 53 ↛ 58line 53 didn't jump to line 58 because the condition on line 53 was always true
54 field_types = {f: t.type for f, t in hint.__dataclass_fields__.items()}
55 converted_data = {k: _convert_argument_type(v, field_types.get(k, type(v))) for k, v in value.items()}
56 result = hint(**converted_data)
57 else:
58 result = value
59 return result
62def _is_async_function(func: object) -> bool:
63 """Check if a function is asynchronous."""
64 return inspect.iscoroutinefunction(func)
# Module-level logger shared by the Funcall handler below.
logger = getLogger("funcall")
class Funcall:
    """Handler for function calling in LLM interactions.

    Registers plain callables (or ``ToolWrapper`` instances), exposes their
    metadata as tool definitions, and dispatches tool calls coming back from
    OpenAI-style or LiteLLM-style responses, with optional context injection.
    """

    def __init__(self, functions: list[Callable] | None = None) -> None:
        """
        Initialize the function call handler.

        Args:
            functions: List of functions to register
        """
        self.functions = functions or []
        # Functions are looked up by their __name__ when a tool call arrives.
        self.function_registry = {func.__name__: func for func in self.functions}

    def get_tools(self, target: Literal["response", "completion"] = "completion") -> list[FunctionToolParam]:
        """
        Get tool definitions for the specified target platform.

        Args:
            target: Target platform ("response" or "completion")

        Returns:
            List of function tool parameters
        """
        return [generate_function_metadata(func, target) for func in self.functions]  # type: ignore

    def _prepare_function_execution(
        self,
        func_name: str,
        args: str,
        context: object = None,
    ) -> tuple[Callable, dict]:
        """
        Prepare function call arguments and context injection.

        Args:
            func_name: Name of the function to call
            args: JSON string of function arguments
            context: Context object to inject

        Returns:
            Tuple of (function, prepared_kwargs)

        Raises:
            ValueError: If the function is not registered
            json.JSONDecodeError: If ``args`` is not valid JSON
        """
        if func_name not in self.function_registry:
            msg = f"Function {func_name} not found"
            raise ValueError(msg)

        func = self.function_registry[func_name]
        signature = inspect.signature(func)
        type_hints = get_type_hints(func)
        arguments = json.loads(args)

        # Parameters that carry real arguments; context-typed parameters are
        # injected by us, never parsed from the JSON payload.
        non_context_params = [name for name in signature.parameters if not is_context_type(type_hints.get(name, str))]

        # Single-parameter convenience: accept a bare value (or a dict whose
        # keys don't match the parameter) and wrap it as {param: value}.
        if len(non_context_params) == 1 and (not isinstance(arguments, dict) or set(arguments.keys()) != set(non_context_params)):
            arguments = {non_context_params[0]: arguments}

        # Build kwargs: convert JSON values to the annotated types and inject
        # the context object where the signature asks for one.
        prepared_kwargs = {}
        for param_name in signature.parameters:
            hint = type_hints.get(param_name, str)

            if is_context_type(hint):
                prepared_kwargs[param_name] = context
            elif param_name in arguments:
                prepared_kwargs[param_name] = _convert_argument_type(arguments[param_name], hint)  # type: ignore

        return func, prepared_kwargs

    def _execute_sync_in_async_context(self, func: Callable, kwargs: dict) -> object:
        """Drive an async callable to completion from synchronous code.

        If no event loop is running in this thread, the coroutine is run with
        ``asyncio.run``. If a loop IS running we must not block it, so the
        coroutine is executed on a dedicated worker thread with its own loop.

        Note: the previous implementation submitted the coroutine function to
        the executor without awaiting it, so callers received a bare,
        never-awaited coroutine object instead of the result.
        """
        coro = func(**kwargs)
        try:
            asyncio.get_running_loop()
        except RuntimeError:
            # No running loop in this thread: safe to run the coroutine here.
            return asyncio.run(coro)
        # A loop is already running: block on a worker thread instead.
        with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor:
            return executor.submit(asyncio.run, coro).result()

    def call_function(
        self,
        name: str,
        arguments: str,
        context: object = None,
    ) -> object:
        """
        Call a function by name with JSON arguments synchronously.

        Args:
            name: Name of the function to call
            arguments: JSON string of function arguments
            context: Context object to inject (optional)

        Returns:
            Function execution result

        Raises:
            ValueError: If function is not found
            json.JSONDecodeError: If arguments are not valid JSON
        """
        func, kwargs = self._prepare_function_execution(name, arguments, context)

        if isinstance(func, ToolWrapper):
            if func.is_async:
                logger.warning(
                    "Function %s is async but being called synchronously. Consider using call_function_async.",
                    name,
                )
                return self._execute_sync_in_async_context(func, kwargs)
            return func(**kwargs)

        if _is_async_function(func):
            logger.warning(
                "Function %s is async but being called synchronously. Consider using call_function_async.",
                name,
            )
            return self._execute_sync_in_async_context(func, kwargs)

        return func(**kwargs)

    async def call_function_async(
        self,
        name: str,
        arguments: str,
        context: object = None,
    ) -> object:
        """
        Call a function by name with JSON arguments asynchronously.

        Args:
            name: Name of the function to call
            arguments: JSON string of function arguments
            context: Context object to inject (optional)

        Returns:
            Function execution result

        Raises:
            ValueError: If function is not found
            json.JSONDecodeError: If arguments are not valid JSON
        """
        func, kwargs = self._prepare_function_execution(name, arguments, context)
        if isinstance(func, ToolWrapper):
            if func.is_async:
                return await func.acall(**kwargs)
            # Run sync function in a thread pool to avoid blocking the loop.
            loop = asyncio.get_running_loop()
            return await loop.run_in_executor(None, lambda: func(**kwargs))

        if _is_async_function(func):
            return await func(**kwargs)

        # Run sync function in a thread pool to avoid blocking the loop.
        loop = asyncio.get_running_loop()
        return await loop.run_in_executor(None, lambda: func(**kwargs))

    def handle_openai_function_call(
        self,
        call: ResponseFunctionToolCall,
        context: object = None,
    ) -> object:
        """
        Handle OpenAI function call synchronously.

        Args:
            call: OpenAI function tool call
            context: Context object to inject

        Returns:
            Function execution result

        Raises:
            TypeError: If ``call`` is not a ``ResponseFunctionToolCall``
        """
        if not isinstance(call, ResponseFunctionToolCall):
            msg = "call must be an instance of ResponseFunctionToolCall"
            raise TypeError(msg)

        return self.call_function(call.name, call.arguments, context)

    async def handle_openai_function_call_async(
        self,
        call: ResponseFunctionToolCall,
        context: object = None,
    ) -> object:
        """
        Handle OpenAI function call asynchronously.

        Args:
            call: OpenAI function tool call
            context: Context object to inject

        Returns:
            Function execution result

        Raises:
            TypeError: If ``call`` is not a ``ResponseFunctionToolCall``
        """
        if not isinstance(call, ResponseFunctionToolCall):
            msg = "call must be an instance of ResponseFunctionToolCall"
            raise TypeError(msg)

        return await self.call_function_async(call.name, call.arguments, context)

    @staticmethod
    def _validate_litellm_call(call: litellm.ChatCompletionMessageToolCall) -> None:
        """Reject malformed LiteLLM tool calls (shared by sync/async handlers)."""
        if not isinstance(call, litellm.ChatCompletionMessageToolCall):
            msg = "call must be an instance of litellm.ChatCompletionMessageToolCall"
            raise TypeError(msg)
        if not call.function:
            msg = "call.function must not be None"
            raise ValueError(msg)
        if not call.function.name:
            msg = "call.function.name must not be empty"
            raise ValueError(msg)

    def handle_litellm_function_call(
        self,
        call: litellm.ChatCompletionMessageToolCall,
        context: object = None,
    ) -> object:
        """
        Handle LiteLLM function call synchronously.

        Args:
            call: LiteLLM function tool call
            context: Context object to inject

        Returns:
            Function execution result

        Raises:
            TypeError: If ``call`` has the wrong type
            ValueError: If the call payload is missing function name/data
        """
        self._validate_litellm_call(call)
        return self.call_function(
            call.function.name,
            call.function.arguments,
            context,
        )

    async def handle_litellm_function_call_async(
        self,
        call: litellm.ChatCompletionMessageToolCall,
        context: object = None,
    ) -> object:
        """
        Handle LiteLLM function call asynchronously.

        Args:
            call: LiteLLM function tool call
            context: Context object to inject

        Returns:
            Function execution result

        Raises:
            TypeError: If ``call`` has the wrong type
            ValueError: If the call payload is missing function name/data
        """
        self._validate_litellm_call(call)
        return await self.call_function_async(
            call.function.name,
            call.function.arguments,
            context,
        )

    def handle_function_call(
        self,
        call: ResponseFunctionToolCall | litellm.ChatCompletionMessageToolCall,
        context: object = None,
    ) -> object:
        """
        Handle function call synchronously (unified interface).

        Args:
            call: Function tool call (OpenAI or LiteLLM)
            context: Context object to inject

        Returns:
            Function execution result

        Raises:
            TypeError: If ``call`` is neither supported call type
        """
        if isinstance(call, ResponseFunctionToolCall):
            return self.handle_openai_function_call(call, context)
        if isinstance(call, litellm.ChatCompletionMessageToolCall):
            return self.handle_litellm_function_call(call, context)
        msg = "call must be an instance of ResponseFunctionToolCall or litellm.ChatCompletionMessageToolCall"
        raise TypeError(msg)

    async def handle_function_call_async(
        self,
        call: ResponseFunctionToolCall | litellm.ChatCompletionMessageToolCall,
        context: object = None,
    ) -> object:
        """
        Handle function call asynchronously (unified interface).

        Args:
            call: Function tool call (OpenAI or LiteLLM)
            context: Context object to inject

        Returns:
            Function execution result

        Raises:
            TypeError: If ``call`` is neither supported call type
        """
        if isinstance(call, ResponseFunctionToolCall):
            return await self.handle_openai_function_call_async(call, context)
        if isinstance(call, litellm.ChatCompletionMessageToolCall):
            return await self.handle_litellm_function_call_async(call, context)
        msg = "call must be an instance of ResponseFunctionToolCall or litellm.ChatCompletionMessageToolCall"
        raise TypeError(msg)

    def get_tool_meta(self, name: str) -> ToolMeta:
        """
        Get metadata for a registered function by name.

        Args:
            name: Name of the function

        Returns:
            Function metadata dictionary

        Raises:
            ValueError: If the function is not registered
        """
        if name not in self.function_registry:
            msg = f"Function {name} not found"
            raise ValueError(msg)

        func = self.function_registry[name]
        if isinstance(func, ToolWrapper):
            return ToolMeta(
                require_confirm=func.require_confirm,
                return_direct=func.return_direct,
            )
        # Plain callables carry no wrapper flags: default to the safe values.
        return ToolMeta(
            require_confirm=False,
            return_direct=False,
        )