src/Inference/functions.php source

1 <?php
2
3 namespace mini;
4
5 use mini\Inference\InferenceServiceInterface;
6
7 /**
8 * Inference Feature - Global Helper Functions
9 *
10 * Provides the public API for LLM-based structured inference.
11 */
12
// Bind InferenceServiceInterface in the container. There is no built-in
// implementation: the factory defers to loadServiceConfig(), so the concrete
// service comes from application configuration at first resolution.
Mini::$mini->addService(
    InferenceServiceInterface::class,
    Lifetime::Singleton,
    static fn () => Mini::$mini->loadServiceConfig(InferenceServiceInterface::class),
);
19
/**
 * Resolve the application's inference service.
 *
 * Convenience accessor around the container binding registered above; the
 * concrete implementation is supplied by application config via
 * loadServiceConfig(). Use it for LLM-based structured evaluation:
 *
 *   // Boolean question
 *   $result = inference()->evaluate("Is this spam?\n\n$text", validator()->enum([true, false]));
 *
 *   // Classification
 *   $category = inference()->evaluate("Classify:\n\n$text", validator()->enum(['bug', 'feature', 'question']));
 *
 *   // Structured extraction
 *   $data = inference()->evaluate($prompt, validator()->type('object')->properties([
 *       'summary' => validator()->type('string')->required(),
 *       'priority' => validator()->enum(['low', 'medium', 'high']),
 *   ]));
 *
 * @return InferenceServiceInterface The configured inference service
 * @throws \mini\Exceptions\ConfigurationRequiredException If no implementation configured
 */
function inference(): InferenceServiceInterface
{
    /** @var InferenceServiceInterface $service */
    $service = Mini::$mini->get(InferenceServiceInterface::class);

    return $service;
}
46