refactor: add specta::Type to structs and remove live_photos from SidecarKind enum

This commit is contained in:
Jamie Pine 2025-10-30 16:25:35 -07:00
parent b3d94096b7
commit d907c27cc4
7 changed files with 30 additions and 329 deletions

View File

@@ -823,12 +823,6 @@ impl EntryProcessor {
JobError::execution(format!("Failed to link content identity to entry: {}", e))
})?;
// TODO: Re-enable Live Photo detection after sidecar system is fully working
// if let Some(live_photo) = LivePhotoDetector::detect_pair(path) {
// // This would create a virtual sidecar for the video component
// Self::handle_live_photo_detection(ctx, content_id, content_hash, path, &live_photo, library_id).await?;
// }
Ok(ContentLinkResult {
content_identity: content_model,
entry: updated_entry,
@@ -836,36 +830,6 @@ impl EntryProcessor {
})
}
// TODO: Refactor this to use virtual sidecars when re-enabling
// /// Handle Live Photo detection - creates a virtual sidecar for the video component
// async fn handle_live_photo_detection(
// ctx: &JobContext<'_>,
// image_content_uuid: &Uuid,
// path: &Path,
// live_photo: &crate::ops::media::LivePhoto,
// library_id: Uuid,
// ) -> Result<(), JobError> {
// // Only process if this is the image component
// if path != live_photo.image_path {
// return Ok(());
// }
//
// // The video becomes a virtual sidecar of the image
// // This would:
// // 1. Create a sidecar record with kind = LivePhotoVideo
// // 2. Set the sidecar's content_uuid to the image's content UUID
// // 3. Store the video file in the sidecar location
// // 4. The video file itself wouldn't get its own entry in the main entries table
//
// ctx.log(format!(
// "Would create Live Photo sidecar: {} (image) -> {} (video sidecar)",
// live_photo.image_path.display(),
// live_photo.video_path.display()
// ));
//
// Ok(())
// }
/// Simple move entry within existing transaction (no directory path cascade updates)
pub async fn simple_move_entry_in_conn(
state: &mut IndexerState,

View File

@@ -1,273 +0,0 @@
//! Live Photo detection and handling
//!
//! NOTE: This should be moved to the Photos extension
//!
//! When enabled, Live Photos are handled as follows:
//! 1. During indexing, when we encounter an image file (HEIC/JPEG), we check for a matching video (MOV/MP4)
//! 2. If found, the video becomes a virtual sidecar of the image
//! 3. The video file is NOT indexed as a separate entry - it only exists as a sidecar
//! 4. This prevents duplicate processing and keeps Live Photos as single logical units
use crate::{
library::Library,
ops::sidecar::{SidecarFormat, SidecarKind, SidecarVariant},
service::sidecar_manager::SidecarManager,
};
use anyhow::Result;
use serde::{Deserialize, Serialize};
use std::path::{Path, PathBuf};
use uuid::Uuid;
/// Represents a detected Live Photo pair
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LivePhoto {
/// The image component (HEIC/JPEG)
pub image_path: PathBuf,
/// The video component (MOV/MP4)
pub video_path: PathBuf,
/// Optional asset identifier if found in metadata
pub asset_id: Option<String>,
}
/// Detects Live Photo pairs based on naming conventions and timestamps
pub struct LivePhotoDetector;
impl LivePhotoDetector {
/// Known Live Photo patterns:
/// 1. Apple Photos exports: `IMG_1234.HEIC` + `IMG_1234.MOV`
/// 2. iCloud Photos: `photo.heic` + `photo.mov` (same name, different extension)
/// 3. Some apps: `photo.jpg` + `photo.mp4`
pub fn detect_pair(path: &Path) -> Option<LivePhoto> {
let file_name = path.file_stem()?.to_str()?;
let extension = path.extension()?.to_str()?.to_lowercase();
// Check if this is an image file
let is_image = matches!(extension.as_str(), "heic" | "heif" | "jpg" | "jpeg");
let is_video = matches!(extension.as_str(), "mov" | "mp4");
if !is_image && !is_video {
return None;
}
let parent = path.parent()?;
// Define the counterpart we're looking for
let counterpart_extensions = if is_image {
vec!["mov", "mp4"]
} else {
vec!["heic", "heif", "jpg", "jpeg"]
};
// Look for matching counterpart
for ext in counterpart_extensions {
let counterpart_path = parent.join(format!("{}.{}", file_name, ext));
if counterpart_path.exists() && counterpart_path != path {
// Found a match!
let (image_path, video_path) = if is_image {
(path.to_path_buf(), counterpart_path)
} else {
(counterpart_path, path.to_path_buf())
};
return Some(LivePhoto {
image_path,
video_path,
asset_id: None, // Could be extracted from EXIF/metadata later
});
}
}
None
}
/// Check if two files form a Live Photo pair
pub fn is_live_photo_pair(image_path: &Path, video_path: &Path) -> bool {
// Must be in same directory
if image_path.parent() != video_path.parent() {
return false;
}
// Must have same base name
if image_path.file_stem() != video_path.file_stem() {
return false;
}
// Check extensions
let img_ext = image_path
.extension()
.and_then(|e| e.to_str())
.map(|e| e.to_lowercase())
.unwrap_or_default();
let vid_ext = video_path
.extension()
.and_then(|e| e.to_str())
.map(|e| e.to_lowercase())
.unwrap_or_default();
let valid_image = matches!(img_ext.as_str(), "heic" | "heif" | "jpg" | "jpeg");
let valid_video = matches!(vid_ext.as_str(), "mov" | "mp4");
valid_image && valid_video
}
/// Generate a deterministic UUID for a Live Photo pair
/// This ensures both components reference the same Live Photo ID
pub fn generate_live_photo_id(image_hash: &str, video_hash: &str) -> Uuid {
// Use the smaller hash first for deterministic ordering
let (first, second) = if image_hash < video_hash {
(image_hash, video_hash)
} else {
(video_hash, image_hash)
};
let combined = format!("{}-{}", first, second);
// Use a namespace UUID for Live Photos
const LIVE_PHOTO_NAMESPACE: Uuid = Uuid::from_bytes([
0x4c, 0x69, 0x76, 0x65, 0x50, 0x68, 0x6f, 0x74, 0x6f, 0x4e, 0x53, 0x00, 0x00, 0x00,
0x00, 0x01,
]);
Uuid::new_v5(&LIVE_PHOTO_NAMESPACE, combined.as_bytes())
}
/// Create a reference sidecar for a Live Photo video
/// This is called during indexing when we find a Live Photo pair
pub async fn create_live_photo_reference_sidecar(
library: &Library,
sidecar_manager: &SidecarManager,
image_content_uuid: &Uuid,
video_entry_id: i32,
video_size: u64,
video_checksum: Option<String>,
) -> Result<()> {
// Create a reference sidecar for the video component
// It references the original video entry without moving the file
sidecar_manager
.create_reference_sidecar(
library,
image_content_uuid, // The video is a sidecar of the image
video_entry_id, // References the video entry
&SidecarKind::LivePhotoVideo,
&SidecarVariant::new("original"),
&SidecarFormat::Mp4, // Or MOV based on actual format
video_size,
video_checksum,
)
.await?;
Ok(())
}
/// Example of how Live Photos would be handled during indexing
/// NOTE: This is a demonstration - actual integration would be in the indexer
#[allow(dead_code)]
async fn example_live_photo_indexing_flow(
library: &Library,
sidecar_manager: &SidecarManager,
image_path: &Path,
image_content_uuid: &Uuid,
) -> Result<()> {
// During indexing, when we process an image file...
if let Some(live_photo) = Self::detect_pair(image_path) {
// We found a Live Photo pair!
// The video would normally be indexed as an entry
// But instead, we skip indexing it and create a reference sidecar
// In real implementation, we would:
// 1. Get or create the video entry (minimal record)
// 2. Get the video's size and checksum
// 3. Create the reference sidecar
let video_entry_id = 12345; // This would come from the database
let video_size = 1024 * 1024 * 10; // 10MB, would come from fs::metadata
let video_checksum = Some("abc123".to_string()); // Would be computed
// Create the reference sidecar
Self::create_live_photo_reference_sidecar(
library,
sidecar_manager,
image_content_uuid,
video_entry_id,
video_size,
video_checksum,
)
.await?;
// The video is now tracked as a virtual sidecar of the image
// It won't appear in search results or galleries as a separate item
// But can be accessed through the image's sidecar API
}
Ok(())
}
/// Bulk convert reference sidecars to owned sidecars
/// This is called when the user wants to take ownership of Live Photo videos
pub async fn convert_live_photos_to_owned(
library: &Library,
sidecar_manager: &SidecarManager,
content_uuids: &[Uuid],
) -> Result<()> {
for content_uuid in content_uuids {
// This will move all reference sidecars (including Live Photo videos)
// to the managed sidecar directory structure
sidecar_manager
.convert_reference_to_owned(library, content_uuid)
.await?;
}
Ok(())
}
}
#[cfg(test)]
mod tests {
use super::*;
use std::fs;
use tempfile::tempdir;
#[test]
fn test_live_photo_detection() {
let dir = tempdir().unwrap();
let dir_path = dir.path();
// Create test files
let image_path = dir_path.join("IMG_1234.HEIC");
let video_path = dir_path.join("IMG_1234.MOV");
fs::write(&image_path, b"fake image").unwrap();
fs::write(&video_path, b"fake video").unwrap();
// Test detection from image
let result = LivePhotoDetector::detect_pair(&image_path);
assert!(result.is_some());
let live_photo = result.unwrap();
assert_eq!(live_photo.image_path, image_path);
assert_eq!(live_photo.video_path, video_path);
// Test detection from video
let result = LivePhotoDetector::detect_pair(&video_path);
assert!(result.is_some());
let live_photo = result.unwrap();
assert_eq!(live_photo.image_path, image_path);
assert_eq!(live_photo.video_path, video_path);
// Test pair validation
assert!(LivePhotoDetector::is_live_photo_pair(
&image_path,
&video_path
));
}
#[test]
fn test_live_photo_id_generation() {
let id1 = LivePhotoDetector::generate_live_photo_id("hash1", "hash2");
let id2 = LivePhotoDetector::generate_live_photo_id("hash2", "hash1");
// Should generate same ID regardless of order
assert_eq!(id1, id2);
}
}

View File

@@ -9,7 +9,6 @@ pub enum SidecarKind {
Embeddings,
Ocr,
Transcript,
LivePhotoVideo,
}
impl SidecarKind {
@@ -20,7 +19,6 @@ impl SidecarKind {
Self::Embeddings => "embeddings",
Self::Ocr => "ocr",
Self::Transcript => "transcript",
Self::LivePhotoVideo => "live_photo_video",
}
}
@@ -31,7 +29,6 @@ impl SidecarKind {
Self::Embeddings => "embeddings",
Self::Ocr => "ocr",
Self::Transcript => "transcript",
Self::LivePhotoVideo => "live_photos",
}
}
}
@@ -52,7 +49,6 @@ impl TryFrom<&str> for SidecarKind {
"embeddings" => Ok(Self::Embeddings),
"ocr" => Ok(Self::Ocr),
"transcript" => Ok(Self::Transcript),
"live_photo_video" => Ok(Self::LivePhotoVideo),
_ => Err(format!("Invalid sidecar kind: {}", value)),
}
}

View File

@@ -643,7 +643,6 @@ impl SidecarManager {
"embeddings",
"ocr",
"transcript",
"live_photos",
] {
let kind_path = content_path.join(kind_str);
if !kind_path.exists() {
@@ -656,7 +655,6 @@ impl SidecarManager {
"embeddings" => SidecarKind::Embeddings,
"ocr" => SidecarKind::Ocr,
"transcript" => SidecarKind::Transcript,
"live_photos" => SidecarKind::LivePhotoVideo,
_ => continue,
};

View File

@@ -4,12 +4,13 @@ use crate::service::sync::state::DeviceSyncState;
use crate::service::sync::metrics::types::*;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use specta::Type;
use std::collections::HashMap;
use std::sync::Arc;
use uuid::Uuid;
/// Point-in-time snapshot of all sync metrics
#[derive(Debug, Clone, Serialize, Deserialize)]
#[derive(Debug, Clone, Serialize, Deserialize, Type)]
pub struct SyncMetricsSnapshot {
/// When this snapshot was taken
pub timestamp: DateTime<Utc>,
@@ -31,18 +32,18 @@ pub struct SyncMetricsSnapshot {
}
/// State metrics snapshot
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
#[derive(Debug, Clone, Default, Serialize, Deserialize, Type)]
pub struct SyncStateSnapshot {
pub current_state: DeviceSyncState,
pub state_entered_at: DateTime<Utc>,
pub uptime_seconds: u64,
pub state_history: Vec<StateTransition>,
pub total_time_in_state: HashMap<DeviceSyncState, u64>, // milliseconds
pub transition_count: HashMap<(DeviceSyncState, DeviceSyncState), u64>,
pub total_time_in_state: Vec<(DeviceSyncState, u64)>, // milliseconds
pub transition_count: Vec<((DeviceSyncState, DeviceSyncState), u64)>,
}
/// Operation metrics snapshot
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
#[derive(Debug, Clone, Default, Serialize, Deserialize, Type)]
pub struct OperationSnapshot {
// Broadcasts
pub broadcasts_sent: u64,
@@ -69,7 +70,7 @@ pub struct OperationSnapshot {
}
/// Data volume metrics snapshot
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
#[derive(Debug, Clone, Default, Serialize, Deserialize, Type)]
pub struct DataVolumeSnapshot {
pub entries_synced: HashMap<String, u64>,
pub entries_by_device: HashMap<Uuid, DeviceMetricsSnapshot>,
@@ -80,7 +81,7 @@ pub struct DataVolumeSnapshot {
}
/// Device metrics snapshot
#[derive(Debug, Clone, Serialize, Deserialize)]
#[derive(Debug, Clone, Serialize, Deserialize, Type)]
pub struct DeviceMetricsSnapshot {
pub device_id: Uuid,
pub device_name: String,
@@ -90,7 +91,7 @@ pub struct DeviceMetricsSnapshot {
}
/// Performance metrics snapshot
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
#[derive(Debug, Clone, Default, Serialize, Deserialize, Type)]
pub struct PerformanceSnapshot {
pub broadcast_latency: LatencySnapshot,
pub apply_latency: LatencySnapshot,
@@ -105,7 +106,7 @@ pub struct PerformanceSnapshot {
}
/// Latency metrics snapshot
#[derive(Debug, Clone, Serialize, Deserialize)]
#[derive(Debug, Clone, Serialize, Deserialize, Type)]
pub struct LatencySnapshot {
pub count: u64,
pub avg_ms: f64,
@@ -114,7 +115,7 @@ pub struct LatencySnapshot {
}
/// Error metrics snapshot
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
#[derive(Debug, Clone, Default, Serialize, Deserialize, Type)]
pub struct ErrorSnapshot {
pub total_errors: u64,
pub network_errors: u64,
@@ -141,7 +142,10 @@ impl SyncMetricsSnapshot {
.iter()
.map(|(k, v)| (*k, v.as_millis() as u64))
.collect();
let transition_count = metrics.state.transition_count.read().await.clone();
let transition_count = metrics.state.transition_count.read().await
.iter()
.map(|(k, v)| (*k, *v))
.collect();
let state = SyncStateSnapshot {
current_state,
@@ -281,7 +285,18 @@ impl SyncMetricsSnapshot {
self.data_volume.last_sync_per_model.retain(|model, _| model == model_type);
// Filter recent errors
self.errors.recent_errors.retain(|error| error.model_type.as_ref() == Some(model_type));
self.errors.recent_errors.retain(|error| error.model_type.as_deref() == Some(model_type));
}
}
impl Default for LatencySnapshot {
fn default() -> Self {
Self {
count: 0,
avg_ms: 0.0,
min_ms: 0,
max_ms: 0,
}
}
}

View File

@@ -2,6 +2,7 @@
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use specta::Type;
use std::collections::{HashMap, VecDeque};
use std::sync::atomic::{AtomicBool, AtomicI64, AtomicU64, Ordering};
use std::sync::Arc;
@@ -73,7 +74,7 @@ impl Default for SyncStateMetrics {
}
/// State transition event
#[derive(Debug, Clone, Serialize, Deserialize)]
#[derive(Debug, Clone, Serialize, Deserialize, Type)]
pub struct StateTransition {
pub from: DeviceSyncState,
pub to: DeviceSyncState,
@@ -325,7 +326,7 @@ impl Default for ErrorMetrics {
}
/// Error event for tracking recent errors
#[derive(Debug, Clone, Serialize, Deserialize)]
#[derive(Debug, Clone, Serialize, Deserialize, Type)]
pub struct ErrorEvent {
pub timestamp: DateTime<Utc>,
pub error_type: String,

Binary file not shown.