// aimx/inference/prompt.rs

//! Prompt construction for inference workflows.
//!
//! Combines [`Modifier`] and [`OutputFormat`] to build system and user
//! messages from a [`WorkflowLike`] for a given [`Capability`].

use crate::{
    aim::{ContextLike, WorkflowLike},
    inference::{Capability, Modifier, OutputFormat},
};
use std::sync::Arc;

/// Combined system + user messages for an inference call.
///
/// Both fields are `Arc<str>`, so cloning a `Prompt` (or either message)
/// is a refcount bump rather than a string copy.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Prompt {
    /// System message text.
    system: Arc<str>,
    /// User message text.
    user: Arc<str>,
}
19impl Prompt {
20    /// Create a new Prompt from raw system and user strings.
21    pub fn new(system: Arc<str>, user: Arc<str>) -> Self {
22        Self { system, user }
23    }
24
25    /// Inference a Prompt from a workflow for a given model capability.
26    ///
27    /// This is the high-level orchestration used by the inference pipeline.
28    /// It is deterministic and side-effect free aside from evaluating rules
29    /// via the provided context.
30    pub fn generate (
31        workflow: Arc<dyn WorkflowLike>,
32        context: &mut dyn ContextLike,
33        capability: &Capability,
34    ) -> Self {
35        let mut format = OutputFormat::new(capability);
36        let modifier = Modifier::new(workflow, context);
37
38        // Section ordering is intentionally fixed and is verified by tests.
39        modifier.role(&mut format);
40        modifier.others(&mut format);
41        modifier.instructions(&mut format);
42        modifier.output_format(&mut format);
43        modifier.examples(&mut format);
44        modifier.user_text(&mut format);
45
46        let system = modifier.system_prompt();
47        let user = Arc::from(format.finish());
48        Self::new(system, user)
49    }
50
51    /// Get the system message.
52    pub fn system(&self) -> Arc<str> {
53        self.system.clone()
54    }
55
56    /// Get the user message.
57    pub fn user(&self) -> Arc<str> {
58        self.user.clone()
59    }
60}