use std::{collections::HashMap, sync::Arc};

use async_trait::async_trait;
use serde::{Deserialize, Serialize};
use serde_json::Value;

/// Response identifier
#[derive(Debug, Clone, Hash, Eq, PartialEq, Serialize, Deserialize)]
pub struct ResponseId(pub String);

impl ResponseId {
    pub fn new() -> Self {
        Self(ulid::Ulid::new().to_string())
    }
}

impl Default for ResponseId {
    fn default() -> Self {
        Self::new()
    }
}

impl From<String> for ResponseId {
    fn from(value: String) -> Self {
        Self(value)
    }
}

impl From<&str> for ResponseId {
    fn from(value: &str) -> Self {
        Self(value.to_string())
    }
}

/// Stored response data
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StoredResponse {
    /// Unique response ID
    pub id: ResponseId,

    /// ID of the previous response in the chain (if any)
    pub previous_response_id: Option<ResponseId>,

    /// The user input for this response
    pub input: String,

    /// System instructions used
    pub instructions: Option<String>,

    /// The model's output
    pub output: String,

    /// Tool calls made by the model (if any)
    pub tool_calls: Vec<Value>,

    /// Custom metadata
    pub metadata: HashMap<String, Value>,

    /// When this response was created
    pub created_at: chrono::DateTime<chrono::Utc>,

    /// User identifier (optional)
    pub user: Option<String>,

    /// Model used for generation
    pub model: Option<String>,

    /// Conversation ID, if this response is associated with a conversation
    #[serde(default)]
    pub conversation_id: Option<String>,

    /// Raw OpenAI response payload
    #[serde(default)]
    pub raw_response: Value,
}

impl StoredResponse {
    pub fn new(input: String, output: String, previous_response_id: Option<ResponseId>) -> Self {
        Self {
            id: ResponseId::new(),
            previous_response_id,
            input,
            instructions: None,
            output,
            tool_calls: Vec::new(),
            metadata: HashMap::new(),
            created_at: chrono::Utc::now(),
            user: None,
            model: None,
            conversation_id: None,
            raw_response: Value::Null,
        }
    }
}

/// Response chain - a sequence of related responses
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ResponseChain {
    /// The responses in chronological order
    pub responses: Vec<StoredResponse>,

    /// Metadata about the chain
    pub metadata: HashMap<String, Value>,
}

impl Default for ResponseChain {
    fn default() -> Self {
        Self::new()
    }
}

impl ResponseChain {
    pub fn new() -> Self {
        Self {
            responses: Vec::new(),
            metadata: HashMap::new(),
        }
    }

    /// Get the ID of the most recent response in the chain
    pub fn latest_response_id(&self) -> Option<&ResponseId> {
        self.responses.last().map(|r| &r.id)
    }

    /// Add a response to the chain
    pub fn add_response(&mut self, response: StoredResponse) {
        self.responses.push(response);
    }

    /// Build context from the chain for the next request
    pub fn build_context(&self, max_responses: Option<usize>) -> Vec<(String, String)> {
        let responses = if let Some(max) = max_responses {
            let start = self.responses.len().saturating_sub(max);
            &self.responses[start..]
        } else {
            &self.responses[..]
        };

        responses
            .iter()
            .map(|r| (r.input.clone(), r.output.clone()))
            .collect()
    }
}
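
// Illustrative-only sketch of how `ResponseChain` is used: build a two-response
// chain, then trim the generated context with `max_responses`. The inputs and
// outputs below are hypothetical sample strings, not real data.
#[cfg(test)]
mod response_chain_tests {
    use super::*;

    #[test]
    fn build_context_respects_max_responses() {
        let mut chain = ResponseChain::new();
        chain.add_response(StoredResponse::new(
            "What is Rust?".to_string(),
            "A systems programming language.".to_string(),
            None,
        ));

        // Link the second response to the first via `previous_response_id`.
        let prev = chain.latest_response_id().cloned();
        chain.add_response(StoredResponse::new(
            "Who maintains it?".to_string(),
            "The Rust project and its contributors.".to_string(),
            prev,
        ));

        // Without a limit, both (input, output) pairs come back in order.
        assert_eq!(chain.build_context(None).len(), 2);

        // With a limit of 1, only the most recent exchange is kept.
        let context = chain.build_context(Some(1));
        assert_eq!(
            context,
            vec![(
                "Who maintains it?".to_string(),
                "The Rust project and its contributors.".to_string(),
            )]
        );
    }
}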

/// Error type for response storage operations
#[derive(Debug, thiserror::Error)]
pub enum ResponseStorageError {
    #[error("Response not found: {0}")]
    ResponseNotFound(String),

    #[error("Invalid chain: {0}")]
    InvalidChain(String),

    #[error("Storage error: {0}")]
    StorageError(String),

    #[error("Serialization error: {0}")]
    SerializationError(#[from] serde_json::Error),
}

pub type Result<T> = std::result::Result<T, ResponseStorageError>;

/// Trait for response storage
#[async_trait]
pub trait ResponseStorage: Send + Sync {
    /// Store a new response
    async fn store_response(&self, response: StoredResponse) -> Result<ResponseId>;

    /// Get a response by ID
    async fn get_response(&self, response_id: &ResponseId) -> Result<Option<StoredResponse>>;

    /// Delete a response
    async fn delete_response(&self, response_id: &ResponseId) -> Result<()>;

    /// Get the chain of responses leading to a given response
    /// Returns responses in chronological order (oldest first)
    async fn get_response_chain(
        &self,
        response_id: &ResponseId,
        max_depth: Option<usize>,
    ) -> Result<ResponseChain>;

    /// List recent responses for a user
    async fn list_user_responses(
        &self,
        user: &str,
        limit: Option<usize>,
    ) -> Result<Vec<StoredResponse>>;

    /// Delete all responses for a user
    async fn delete_user_responses(&self, user: &str) -> Result<usize>;
}

/// Type alias for shared storage
pub type SharedResponseStorage = Arc<dyn ResponseStorage>;
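
// A minimal in-memory sketch of a `ResponseStorage` backend, included purely to
// illustrate the trait contract (chain walking via `previous_response_id`,
// oldest-first ordering, per-user listing). The type name and the
// `Mutex<HashMap>` store are illustrative assumptions; a production backend
// would typically persist responses to a database or cache instead.
#[allow(dead_code)]
#[derive(Default)]
struct InMemoryResponseStorage {
    responses: std::sync::Mutex<HashMap<ResponseId, StoredResponse>>,
}

#[async_trait]
impl ResponseStorage for InMemoryResponseStorage {
    async fn store_response(&self, response: StoredResponse) -> Result<ResponseId> {
        let id = response.id.clone();
        self.responses.lock().unwrap().insert(id.clone(), response);
        Ok(id)
    }

    async fn get_response(&self, response_id: &ResponseId) -> Result<Option<StoredResponse>> {
        Ok(self.responses.lock().unwrap().get(response_id).cloned())
    }

    async fn delete_response(&self, response_id: &ResponseId) -> Result<()> {
        self.responses.lock().unwrap().remove(response_id);
        Ok(())
    }

    async fn get_response_chain(
        &self,
        response_id: &ResponseId,
        max_depth: Option<usize>,
    ) -> Result<ResponseChain> {
        let store = self.responses.lock().unwrap();
        let mut chain = ResponseChain::new();
        let mut current = Some(response_id.clone());

        // Walk backwards through `previous_response_id` links, then reverse so
        // the chain comes back in chronological order (oldest first).
        while let Some(id) = current {
            if max_depth.is_some_and(|max| chain.responses.len() >= max) {
                break;
            }
            match store.get(&id) {
                Some(response) => {
                    current = response.previous_response_id.clone();
                    chain.add_response(response.clone());
                }
                None => break,
            }
        }
        chain.responses.reverse();
        Ok(chain)
    }

    async fn list_user_responses(
        &self,
        user: &str,
        limit: Option<usize>,
    ) -> Result<Vec<StoredResponse>> {
        let store = self.responses.lock().unwrap();
        let mut responses: Vec<StoredResponse> = store
            .values()
            .filter(|r| r.user.as_deref() == Some(user))
            .cloned()
            .collect();

        // Most recent first, then apply the optional limit.
        responses.sort_by(|a, b| b.created_at.cmp(&a.created_at));
        if let Some(limit) = limit {
            responses.truncate(limit);
        }
        Ok(responses)
    }

    async fn delete_user_responses(&self, user: &str) -> Result<usize> {
        let mut store = self.responses.lock().unwrap();
        let before = store.len();
        store.retain(|_, r| r.user.as_deref() != Some(user));
        Ok(before - store.len())
    }
}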

impl Default for StoredResponse {
    fn default() -> Self {
        Self::new(String::new(), String::new(), None)
    }
}