=== Cargo.toml ===
[workspace]
members = ["core", "clipper"]
resolver = "2"

[workspace.dependencies]
# Async runtime
tokio = { version = "1", features = ["full"] }
# Serialization
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
# Error handling
anyhow = "1.0"
# Thread-safe primitives
parking_lot = "0.12"
# Compression
zstd = "0.13"
# Image processing
image = "0.25"
# Database
rusqlite = { version = "0.30", features = ["bundled"] }
env_logger = "0.10"

[profile.release]
# Optimize for speed
lto = true
codegen-units = 1
opt-level = 3
strip = true

=== clipper/Cargo.toml ===
[package]
name = "clipper"
version = "1.1.5"
edition = "2021"

[dependencies]
# Core functionality
nocb = { path = "../core" }
crux_core = "0.15"
# Serialization
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
# Async runtime
tokio = { version = "1", features = ["full"] }
# Error handling
anyhow = "1.0"
# Thread-safe primitives
parking_lot = "0.12"
# GUI framework - egui
egui = "0.28"
eframe = { version = "0.28", features = ["glow"] }

# System tray support (non-Linux platforms)
[target.'cfg(not(target_os = "linux"))'.dependencies]
tray-icon = "0.14"
global-hotkey = "0.2"

[lib]
name = "clipper"
path = "src/lib.rs"

[[bin]]
name = "clipper"
path = "src/main.rs"

[features]
default = []
# Add feature flags if you want optional functionality
wayland = ["eframe/wayland"]
x11 = ["eframe/x11"]

# Windows-specific build dependencies for icon
[target.'cfg(windows)'.build-dependencies]
winres = "0.1"

=== clipper/src/lib.rs ===
// clipper/src/lib.rs
//
// Crux "app core": pure state machine for the clipboard picker UI.
// The shell (main.rs) performs all I/O in response to Load/Copy events.
use crux_core::macros::Effect;
use crux_core::{render::Render, App, Command};
use serde::{Deserialize, Serialize};

/// One clipboard-history row as displayed in the picker.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct ClipEntry {
    pub id: i64,
    pub hash: String,
    pub content: String,
    pub time_ago: String,
    pub entry_type: String,
    // NOTE(review): generic parameter restored — extraction stripped `<String>`.
    pub size_str: Option<String>,
}

/// UI state; also used directly as the ViewModel.
#[derive(Serialize, Deserialize, Clone, Debug, Default)]
pub struct Model {
    pub clips: Vec<ClipEntry>,
    pub search_query: String,
    pub selected_index: usize,
    pub theme: String,
}

#[derive(Serialize, Deserialize, Clone, Debug)]
pub enum Event {
    Init,
    RefreshClips,
    ClipsLoaded(Vec<ClipEntry>),
    UpdateSearch(String),
    SelectIndex(usize),
    CopyClip(usize),
    Copied,
    KeyPress(Key),
    // Shell request events
    LoadClips,
    CopyToClipboard(String),
}

#[derive(Serialize, Deserialize, Clone, Debug)]
pub enum Key {
    Up,
    Down,
    Enter,
    Escape,
}

#[derive(Effect)]
pub struct Capabilities {
    render: Render<Event>,
}

#[derive(Default)]
pub struct ClipperApp;

impl App for ClipperApp {
    type Event = Event;
    type Model = Model;
    type ViewModel = Model;
    type Capabilities = Capabilities;
    type Effect = Effect;

    /// Pure update: mutates the model and emits follow-up events; the shell
    /// intercepts `LoadClips` / `CopyToClipboard` and performs the I/O.
    fn update(
        &self,
        event: Self::Event,
        model: &mut Self::Model,
        caps: &Self::Capabilities,
    ) -> Command<Effect, Event> {
        match event {
            Event::Init | Event::RefreshClips => {
                caps.render.render();
                Command::event(Event::LoadClips)
            }
            Event::LoadClips => {
                // This will be handled by the shell
                Command::done()
            }
            Event::ClipsLoaded(clips) => {
                model.clips = clips;
                caps.render.render();
                Command::done()
            }
            Event::UpdateSearch(query) => {
                model.search_query = query;
                // Reset the cursor so it never points past the filtered list.
                model.selected_index = 0;
                caps.render.render();
                Command::done()
            }
            Event::SelectIndex(index) => {
                model.selected_index = index;
                caps.render.render();
                Command::done()
            }
            Event::CopyClip(index) => {
                if let Some(clip) = self.filtered_clips(model).get(index) {
                    let content = clip.content.clone();
                    Command::event(Event::CopyToClipboard(content))
                } else {
                    Command::done()
                }
            }
            Event::CopyToClipboard(_) => {
                // This will be handled by the shell
                Command::done()
            }
            Event::Copied => {
                caps.render.render();
                Command::done()
            }
            Event::KeyPress(key) => {
                match key {
                    Key::Up => {
                        if model.selected_index > 0 {
                            model.selected_index -= 1;
                            caps.render.render();
                        }
                        Command::done()
                    }
                    Key::Down => {
                        let max = self.filtered_clips(model).len().saturating_sub(1);
                        if model.selected_index < max {
                            model.selected_index += 1;
                            caps.render.render();
                        }
                        Command::done()
                    }
                    Key::Enter => self.update(Event::CopyClip(model.selected_index), model, caps),
                    Key::Escape => {
                        // Handle in shell (close window)
                        Command::done()
                    }
                }
            }
        }
    }

    fn view(&self, model: &Self::Model) -> Self::ViewModel {
        model.clone()
    }
}

impl ClipperApp {
    /// Case-insensitive substring filter over clip contents.
    fn filtered_clips(&self, model: &Model) -> Vec<ClipEntry> {
        if model.search_query.is_empty() {
            model.clips.clone()
        } else {
            let query = model.search_query.to_lowercase();
            model
                .clips
                .iter()
                .filter(|clip| clip.content.to_lowercase().contains(&query))
                .cloned()
                .collect()
        }
    }
}

=== clipper/src/main.rs ===
use clipper::{ClipEntry, ClipperApp, Event, Key, Model};
use crux_core::Core;
use eframe::egui;
use parking_lot::Mutex;
use std::sync::Arc;

#[cfg(target_os = "windows")]
use std::os::windows::process::CommandExt;

// chaOS color scheme
fn chaos_theme() -> egui::Visuals {
    let mut visuals = egui::Visuals::dark();
    let pink = egui::Color32::from_rgb(0xE6, 0x00, 0x7A);
    let bg = egui::Color32::from_rgba_unmultiplied(0, 0, 0, 0xCC);
    let bg_alt = egui::Color32::from_rgb(0x1A, 0x1B, 0x26);
    visuals.widgets.noninteractive.bg_fill = bg_alt;
    visuals.widgets.noninteractive.bg_stroke = egui::Stroke::new(2.0, pink);
    visuals.widgets.noninteractive.fg_stroke = egui::Stroke::new(1.0, pink);
    visuals.widgets.inactive.bg_fill = bg_alt;
    visuals.widgets.inactive.bg_stroke = egui::Stroke::new(2.0, pink);
    visuals.widgets.inactive.fg_stroke = egui::Stroke::new(1.0, pink);
    visuals.widgets.hovered.bg_fill = pink.linear_multiply(0.2);
    visuals.widgets.hovered.bg_stroke = egui::Stroke::new(2.0, pink);
    visuals.widgets.hovered.fg_stroke = egui::Stroke::new(1.0, pink);
    visuals.widgets.active.bg_fill = pink;
    visuals.widgets.active.bg_stroke = egui::Stroke::new(2.0, pink);
    visuals.widgets.active.fg_stroke = egui::Stroke::new(1.0, egui::Color32::BLACK);
    visuals.selection.bg_fill = pink;
    visuals.selection.stroke = egui::Stroke::new(1.0, egui::Color32::BLACK);
    visuals.window_fill = bg;
    visuals.window_stroke = egui::Stroke::new(3.0, pink);
    visuals.window_rounding = egui::Rounding::same(8.0);
    visuals.extreme_bg_color = bg;
    visuals.panel_fill = bg;
visuals.faint_bg_color = bg_alt; visuals } mod daemon { use super::ClipEntry; use std::process::Command; pub fn ensure_daemon_running() -> Result<(), Box> { match send_command("") { Ok(_) => Ok(()), Err(_) => { #[cfg(target_os = "windows")] { use std::os::windows::process::CommandExt; Command::new("nocb") .arg("daemon") .creation_flags(0x08000000) // CREATE_NO_WINDOW .spawn()?; } #[cfg(not(target_os = "windows"))] { use std::process::Stdio; Command::new("nocb") .arg("daemon") .stdout(Stdio::null()) .stderr(Stdio::null()) .spawn()?; } std::thread::sleep(std::time::Duration::from_millis(500)); Ok(()) } } } pub fn get_clips() -> Result, Box> { let output = Command::new("nocb").arg("print").output()?; let stdout = String::from_utf8_lossy(&output.stdout); let mut clips = Vec::new(); for (id, line) in stdout.lines().enumerate() { if line.trim().is_empty() { continue; } let parts: Vec<&str> = line.splitn(2, ' ').collect(); if parts.len() < 2 { continue; } let time_ago = parts[0].to_string(); let rest = parts[1]; let hash_pos = rest.rfind('#').unwrap_or(rest.len()); let content = rest[..hash_pos].trim(); let hash = if hash_pos < rest.len() { rest[hash_pos + 1..].trim().to_string() } else { format!("unknown{}", id) }; let (entry_type, size_str, display_content) = if content.starts_with("[IMG:") { ( "image".to_string(), parse_size_from_image(content), content.to_string(), ) } else if content.contains(" [") && content.ends_with(']') { let bracket_pos = content.rfind(" [").unwrap_or(content.len()); let text_part = &content[..bracket_pos]; let size_part = &content[bracket_pos + 2..content.len() - 1]; ( "text".to_string(), Some(size_part.to_string()), text_part.to_string(), ) } else { ("text".to_string(), None, content.to_string()) }; clips.push(ClipEntry { id: id as i64, hash: hash.clone(), content: display_content, time_ago, entry_type, size_str, }); } Ok(clips) } fn parse_size_from_image(content: &str) -> Option { if let Some(start) = content.find(' ') { if let Some(end) = 
content.rfind(' ') { if end > start { return Some(content[end + 1..content.len() - 1].to_string()); } } } None } pub fn send_command(selection: &str) -> Result<(), Box> { Command::new("nocb").arg("copy").arg(selection).output()?; Ok(()) } } struct ClipperGui { core: Core, model: Model, show_window: Arc>, } impl ClipperGui { fn new(cc: &eframe::CreationContext<'_>, show_window: Arc>) -> Self { cc.egui_ctx.set_visuals(chaos_theme()); if let Err(e) = daemon::ensure_daemon_running() { eprintln!("Failed to start daemon: {}", e); } let mut app = Self { core: Core::new(), model: Model::default(), show_window, }; app.process_event(Event::Init); app } fn process_event(&mut self, event: Event) { match &event { Event::Init | Event::RefreshClips | Event::LoadClips => { self.load_clips(); } Event::CopyClip(index) => { if let Some(clip) = self.filtered_clips().get(*index) { self.copy_to_clipboard(&clip); } } Event::CopyToClipboard(selection) => { if let Err(e) = daemon::send_command(selection) { eprintln!("Failed to copy: {}", e); } } _ => {} } let _effects = self.core.process_event(event); self.model = self.core.view(); } fn load_clips(&mut self) { match daemon::get_clips() { Ok(clips) => { self.process_event(Event::ClipsLoaded(clips)); } Err(e) => { eprintln!("Failed to load clips: {}", e); self.process_event(Event::ClipsLoaded(vec![ClipEntry { id: 0, hash: "error".to_string(), content: format!("Failed to load clips: {}", e), time_ago: "!".to_string(), entry_type: "text".to_string(), size_str: None, }])); } } } fn copy_to_clipboard(&mut self, clip: &ClipEntry) { let selection = if clip.size_str.is_some() { format!( "{} {} [{}] #{}", clip.time_ago, clip.content, clip.size_str.as_ref().unwrap(), clip.hash ) } else { format!("{} {} #{}", clip.time_ago, clip.content, clip.hash) }; self.process_event(Event::CopyToClipboard(selection)); self.process_event(Event::Copied); } fn filtered_clips(&self) -> Vec { if self.model.search_query.is_empty() { self.model.clips.clone() } else { let 
query = self.model.search_query.to_lowercase();
            self.model
                .clips
                .iter()
                .filter(|clip| clip.content.to_lowercase().contains(&query))
                .cloned()
                .collect()
        }
    }
}

impl eframe::App for ClipperGui {
    /// One frame: search bar, scrollable clip list, keyboard navigation.
    fn update(&mut self, ctx: &egui::Context, _frame: &mut eframe::Frame) {
        if !*self.show_window.lock() {
            ctx.send_viewport_cmd(egui::ViewportCommand::Minimized(true));
            return;
        }
        if ctx.input(|i| i.key_pressed(egui::Key::Escape)) {
            *self.show_window.lock() = false;
            ctx.send_viewport_cmd(egui::ViewportCommand::Close);
        }
        egui::CentralPanel::default()
            .frame(
                egui::Frame::none()
                    .fill(egui::Color32::from_rgba_unmultiplied(0, 0, 0, 0xCC))
                    .inner_margin(egui::Margin::same(24.0)),
            )
            .show(ctx, |ui| {
                ui.spacing_mut().item_spacing = egui::vec2(8.0, 16.0);
                let bg_alt = egui::Color32::from_rgb(0x1A, 0x1B, 0x26);
                let pink = egui::Color32::from_rgb(0xE6, 0x00, 0x7A);
                let cyan = egui::Color32::from_rgb(0x00, 0xFF, 0xE1);
                // Search bar with filtered/total counter and refresh button.
                egui::Frame::none()
                    .fill(bg_alt)
                    .rounding(egui::Rounding::same(5.0))
                    .inner_margin(egui::Margin::symmetric(16.0, 12.0))
                    .show(ui, |ui| {
                        ui.horizontal(|ui| {
                            ui.colored_label(pink, "󱞩 nocb:");
                            let response = ui.add_sized(
                                [ui.available_width() - 120.0, 20.0],
                                egui::TextEdit::singleline(&mut self.model.search_query)
                                    .font(egui::TextStyle::Monospace)
                                    .hint_text("Type to search...")
                                    .desired_width(f32::INFINITY),
                            );
                            // Keep the search box focused every frame.
                            response.request_focus();
                            if response.changed() {
                                self.process_event(Event::UpdateSearch(
                                    self.model.search_query.clone(),
                                ));
                            }
                            ui.colored_label(
                                cyan,
                                format!(
                                    "{}/{}",
                                    self.filtered_clips().len(),
                                    self.model.clips.len()
                                ),
                            );
                            if ui.button("⟳").clicked() {
                                self.process_event(Event::RefreshClips);
                            }
                        });
                    });
                ui.add_space(8.0);
                let clips = self.filtered_clips();
                egui::ScrollArea::vertical()
                    .auto_shrink([false; 2])
                    .show(ui, |ui| {
                        for (index, clip) in clips.iter().enumerate() {
                            let is_selected = index == self.model.selected_index;
                            let mut frame = egui::Frame::none()
                                .inner_margin(egui::Margin::symmetric(16.0, 10.0))
                                .rounding(egui::Rounding::same(5.0));
                            if is_selected {
                                frame = frame.fill(pink);
                            } else if index % 2 == 1 {
                                // Zebra striping for unselected rows.
                                frame = frame.fill(egui::Color32::from_rgba_unmultiplied(
                                    0x1A, 0x1B, 0x26, 0x11,
                                ));
                            }
                            let response = frame
                                .show(ui, |ui| {
                                    ui.horizontal(|ui| {
                                        if is_selected {
                                            ui.colored_label(egui::Color32::BLACK, &clip.time_ago);
                                        } else {
                                            ui.colored_label(cyan, &clip.time_ago);
                                        }
                                        ui.separator();
                                        let label = if is_selected {
                                            ui.colored_label(egui::Color32::BLACK, &clip.content)
                                        } else {
                                            ui.label(&clip.content)
                                        };
                                        if let Some(size) = &clip.size_str {
                                            ui.with_layout(
                                                egui::Layout::right_to_left(egui::Align::Center),
                                                |ui| {
                                                    if is_selected {
                                                        ui.colored_label(
                                                            egui::Color32::BLACK,
                                                            size,
                                                        );
                                                    } else {
                                                        ui.colored_label(cyan, size);
                                                    }
                                                },
                                            );
                                        }
                                        label
                                    })
                                })
                                .inner;
                            if response.response.clicked() {
                                self.process_event(Event::SelectIndex(index));
                                self.process_event(Event::CopyClip(index));
                                *self.show_window.lock() = false;
                                ctx.send_viewport_cmd(egui::ViewportCommand::Close);
                            }
                            if response.response.hovered() && !is_selected {
                                self.process_event(Event::SelectIndex(index));
                            }
                        }
                    });
                if ctx.input(|i| i.key_pressed(egui::Key::ArrowUp)) {
                    self.process_event(Event::KeyPress(Key::Up));
                }
                if ctx.input(|i| i.key_pressed(egui::Key::ArrowDown)) {
                    self.process_event(Event::KeyPress(Key::Down));
                }
                if ctx.input(|i| i.key_pressed(egui::Key::Enter)) {
                    self.process_event(Event::KeyPress(Key::Enter));
                    *self.show_window.lock() = false;
                    ctx.send_viewport_cmd(egui::ViewportCommand::Close);
                }
            });
        // Poll the daemon for changes even while idle.
        ctx.request_repaint_after(std::time::Duration::from_secs(5));
    }
}

/// Install a system tray icon and a Super+B global hotkey that toggle the
/// shared `show_window` flag (non-Linux platforms only).
#[cfg(not(target_os = "linux"))]
fn setup_tray_and_hotkey(
    show_window: Arc<Mutex<bool>>,
) -> Result<(), Box<dyn std::error::Error>> {
    use global_hotkey::{
        hotkey::{Code, HotKey, Modifiers},
        GlobalHotKeyManager,
    };
    use tray_icon::{
        menu::{Menu, MenuItem},
        Icon, TrayIconBuilder,
    };
    let menu = Menu::new();
    let show_item = MenuItem::new("Show Clipper", true, None);
    let quit_item = MenuItem::new("Quit", true, None);
    menu.append(&show_item)?;
    menu.append(&quit_item)?;
    let show_id = show_item.id().clone();
    let quit_id =
quit_item.id().clone(); let mut icon_data = vec![0u8; 32 * 32 * 4]; for chunk in icon_data.chunks_mut(4) { chunk[0] = 0xE6; // R chunk[1] = 0x00; // G chunk[2] = 0x7A; // B chunk[3] = 0xFF; // A } let icon = Icon::from_rgba(icon_data, 32, 32)?; let _tray = TrayIconBuilder::new() .with_menu(Box::new(menu)) .with_tooltip("Clipper - Clipboard Manager") .with_icon(icon) .build()?; let manager = GlobalHotKeyManager::new()?; let hotkey = HotKey::new( Some(Modifiers::SUPER), Code::KeyB, ); manager.register(hotkey)?; std::thread::spawn(move || { let menu_channel = tray_icon::menu::MenuEvent::receiver(); let hotkey_channel = global_hotkey::GlobalHotKeyEvent::receiver(); loop { if let Ok(event) = menu_channel.try_recv() { if event.id == show_id { *show_window.lock() = true; } else if event.id == quit_id { std::process::exit(0); } } if let Ok(_event) = hotkey_channel.try_recv() { let mut window_visible = show_window.lock(); *window_visible = !*window_visible; } std::thread::sleep(std::time::Duration::from_millis(10)); } }); Ok(()) } fn main() -> Result<(), eframe::Error> { unsafe { std::env::set_var("WGPU_BACKEND", "gl"); } let show_window = Arc::new(Mutex::new(false)); #[cfg(not(target_os = "linux"))] { if let Err(e) = setup_tray_and_hotkey(show_window.clone()) { eprintln!("Failed to setup tray/hotkey: {}", e); } } let options = eframe::NativeOptions { viewport: egui::ViewportBuilder::default() .with_inner_size([600.0, 700.0]) .with_always_on_top() .with_decorations(false) .with_transparent(true) .with_visible(false), renderer: eframe::Renderer::Glow, ..Default::default() }; let show_window_clone = show_window.clone(); eframe::run_native( "Clipper", options, Box::new(move |cc| Ok(Box::new(ClipperGui::new(cc, show_window_clone)))), ) } === core/Cargo.toml === [package] name = "nocb" version = "1.1.5" edition = "2024" [lib] name = "nocb" path = "src/lib.rs" [[bin]] name = "nocb" path = "src/main.rs" [dependencies] # Error handling anyhow = { workspace = true } # Clipboard 
access arboard = "3.2" # Hashing blake3 = "1.5" # LRU cache lru = "0.12" # Thread-safe primitives parking_lot = { workspace = true } # Database rusqlite = { workspace = true } # Serialization serde = { workspace = true } serde_json = { workspace = true } toml = "0.8" # Async runtime tokio = { workspace = true } # Security zeroize = "1.7" # Compression zstd = { workspace = true } # Image processing image = { workspace = true } # System directories dirs = "5.0" clap = { version = "4.5", features = ["derive"] } # Platform-specific dependencies [target.'cfg(unix)'.dependencies] libc = "0.2" [target.'cfg(windows)'.dependencies] # Windows-specific if needed === core/src/cache.rs === use lru::LruCache; use parking_lot::RwLock; use std::num::NonZeroUsize; use std::sync::Arc; #[derive(Clone, Debug)] pub struct CachedEntry { pub content_type: String, pub inline_text: Option, pub file_path: Option, pub compressed: bool, pub size_bytes: usize, pub timestamp: i64, pub mime_type: Option, pub width: Option, pub height: Option, } impl CachedEntry { pub fn text(content: String, timestamp: i64, size: usize) -> Self { Self { content_type: "text".to_string(), inline_text: Some(content), file_path: None, compressed: false, size_bytes: size, timestamp, mime_type: None, width: None, height: None, } } pub fn text_file(path: String, compressed: bool, timestamp: i64, size: usize) -> Self { Self { content_type: "text_file".to_string(), inline_text: None, file_path: Some(path), compressed, size_bytes: size, timestamp, mime_type: None, width: None, height: None, } } pub fn image( path: String, mime: String, width: u32, height: u32, timestamp: i64, size: usize, ) -> Self { Self { content_type: "image".to_string(), inline_text: None, file_path: Some(path), compressed: false, size_bytes: size, timestamp, mime_type: Some(mime), width: Some(width), height: Some(height), } } } pub struct EntryCache { inner: Arc>>, } impl EntryCache { pub fn new(capacity: usize) -> Self { Self { inner: 
Arc::new(RwLock::new(LruCache::new(
                NonZeroUsize::new(capacity).unwrap_or(NonZeroUsize::new(64).unwrap()),
            ))),
        }
    }

    /// Look up and promote to most-recently-used (hence the write lock).
    pub fn get(&self, hash: &str) -> Option<CachedEntry> {
        self.inner.write().get(hash).cloned()
    }

    /// Look up without changing LRU order.
    pub fn peek(&self, hash: &str) -> Option<CachedEntry> {
        self.inner.read().peek(hash).cloned()
    }

    pub fn put(&self, hash: String, entry: CachedEntry) {
        self.inner.write().put(hash, entry);
    }

    pub fn contains(&self, hash: &str) -> bool {
        self.inner.read().contains(hash)
    }

    pub fn remove(&self, hash: &str) -> Option<CachedEntry> {
        self.inner.write().pop(hash)
    }

    pub fn clear(&self) {
        self.inner.write().clear();
    }

    /// Snapshot of all entries, newest timestamp first.
    pub fn iter_sorted(&self) -> Vec<(String, CachedEntry)> {
        let cache = self.inner.read();
        let mut entries: Vec<_> = cache.iter().map(|(k, v)| (k.clone(), v.clone())).collect();
        entries.sort_by(|a, b| b.1.timestamp.cmp(&a.1.timestamp));
        entries
    }

    pub fn get_hashes(&self) -> Vec<String> {
        self.inner.read().iter().map(|(k, _)| k.clone()).collect()
    }

    pub fn len(&self) -> usize {
        self.inner.read().len()
    }

    pub fn is_empty(&self) -> bool {
        self.inner.read().is_empty()
    }
}

=== core/src/lib.rs ===
mod cache;

use cache::{CachedEntry, EntryCache};

use anyhow::{Context, Result};
use arboard::{Clipboard, ImageData};
use blake3::Hasher;
use parking_lot::RwLock;
use rusqlite::{Connection as SqliteConnection, OptionalExtension, params};
use serde::{Deserialize, Serialize};
use std::fs;
use std::io::{Read, Write};
use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::time::{Duration, SystemTime, UNIX_EPOCH};
use tokio::sync::mpsc;
use zeroize::Zeroize;

// Text up to this size is stored inline in the DB; larger goes to a blob file.
const MAX_INLINE_SIZE: usize = 512;
const POLL_INTERVAL_MS: u64 = 100;
const HASH_PREFIX_LEN: usize = 8;
const MAX_CLIPBOARD_SIZE: usize = 100 * 1024 * 1024; // 100MB
const MAX_IPC_MESSAGE_SIZE: usize = 4096;
// Magic prefix every IPC message must carry (cheap protocol check).
const IPC_MAGIC: &[u8] = b"NOCB\x00\x01";
const LRU_CACHE_SIZE: usize = 64;

/// User-facing configuration, loaded from / written to `config.toml`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Config {
    pub cache_dir: PathBuf,
    pub max_entries: usize,
    pub max_display_length: usize,
    pub max_print_entries: usize,
    pub
blacklist: Vec<String>,
    pub trim_whitespace: bool,
    pub static_entries: Vec<String>,
    pub compress_threshold: usize,
    #[serde(default = "default_max_age_days")]
    pub max_age_days: u32,
}

fn default_max_age_days() -> u32 {
    30
}

impl Default for Config {
    fn default() -> Self {
        let cache_dir = dirs::cache_dir()
            .unwrap_or_else(|| PathBuf::from("/tmp"))
            .join("nocb");
        Self {
            cache_dir,
            max_entries: 10000,
            max_display_length: 200,
            max_print_entries: 1000,
            blacklist: Vec::new(),
            trim_whitespace: true,
            static_entries: Vec::new(),
            compress_threshold: 4096,
            max_age_days: 30,
        }
    }
}

impl Config {
    /// Load `<config_dir>/nocb/config.toml`; on first run, write defaults out.
    pub fn load() -> Result<Self> {
        let config_path = dirs::config_dir()
            .unwrap_or_default()
            .join("nocb")
            .join("config.toml");
        if config_path.exists() {
            let content = fs::read_to_string(&config_path)?;
            Ok(toml::from_str(&content)?)
        } else {
            let config = Self::default();
            if let Some(parent) = config_path.parent() {
                fs::create_dir_all(parent)?;
            }
            fs::write(&config_path, toml::to_string_pretty(&config)?)?;
            Ok(config)
        }
    }
}

/// How an entry's payload is stored.
#[derive(Debug, Clone)]
pub enum ContentType {
    Text(String),
    TextFile {
        hash: String,
        compressed: bool,
    },
    Image {
        mime: String,
        hash: String,
        width: u32,
        height: u32,
    },
}

#[derive(Debug, Clone)]
pub struct Entry {
    pub id: Option<i64>,
    pub hash: String,
    pub timestamp: u64,
    pub app_name: String,
    pub content: ContentType,
    pub size_bytes: usize,
}

impl Entry {
    fn new(content: ContentType, app_name: String, hash: String, size: usize) -> Self {
        let timestamp = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .unwrap_or_default()
            .as_secs();
        Self {
            id: None,
            hash,
            timestamp,
            app_name,
            content,
            size_bytes: size,
        }
    }
}

/// Commands delivered to the daemon over IPC.
#[derive(Debug)]
pub enum Command {
    Copy(String),
    Exit,
    Clear,
    Prune(Vec<String>),
}

enum ClipboardContent<'a> {
    Text(String),
    Image(ImageData<'a>),
}

pub struct ClipboardManager {
    config: Config,
    db: SqliteConnection,
    clipboard: Arc<RwLock<Clipboard>>,
    // Hash of the last clipboard payload seen; used to suppress duplicates.
    last_clipboard_hash: Option<String>,
    command_rx: Option<mpsc::Receiver<Command>>,
    cache: EntryCache,
}

impl ClipboardManager {
    /// Create cache directories, open/migrate the SQLite index, and grab
    /// a clipboard handle.
    pub async fn new(config: Config) -> Result<Self> {
        fs::create_dir_all(&config.cache_dir)?;
        fs::create_dir_all(config.cache_dir.join("blobs"))?;
        let db_path = config.cache_dir.join("index.db");
        let db = SqliteConnection::open(&db_path)?;
        Self::init_db(&db)?;
        let clipboard = Clipboard::new().context("Failed to initialize clipboard")?;
        let cache = EntryCache::new(LRU_CACHE_SIZE);
        Ok(Self {
            config,
            db,
            clipboard: Arc::new(RwLock::new(clipboard)),
            last_clipboard_hash: None,
            command_rx: None,
            cache,
        })
    }

    /// Apply pragmas, create the schema, and run in-place column migrations.
    fn init_db(db: &SqliteConnection) -> Result<()> {
        db.execute_batch(
            "PRAGMA journal_mode = WAL;
             PRAGMA synchronous = NORMAL;
             PRAGMA cache_size = -64000;
             PRAGMA mmap_size = 268435456;
             PRAGMA temp_store = MEMORY;
             PRAGMA foreign_keys = ON;",
        )?;
        db.execute_batch(
            "CREATE TABLE IF NOT EXISTS entries (
                id INTEGER PRIMARY KEY,
                hash TEXT NOT NULL UNIQUE,
                timestamp INTEGER NOT NULL,
                app_name TEXT NOT NULL,
                content_type TEXT NOT NULL,
                file_path TEXT,
                inline_text TEXT,
                mime_type TEXT,
                size_bytes INTEGER NOT NULL,
                compressed INTEGER DEFAULT 0,
                width INTEGER,
                height INTEGER
            );
            CREATE INDEX IF NOT EXISTS idx_timestamp ON entries(timestamp DESC);
            CREATE INDEX IF NOT EXISTS idx_hash ON entries(hash);",
        )?;
        // schema migration for existing databases
        let has_compressed: bool = db.query_row(
            "SELECT COUNT(*) FROM pragma_table_info('entries') WHERE name='compressed'",
            [],
            |row| row.get::<_, i64>(0).map(|count| count > 0),
        )?;
        if !has_compressed {
            db.execute(
                "ALTER TABLE entries ADD COLUMN compressed INTEGER DEFAULT 0",
                [],
            )?;
        }
        let has_width: bool = db.query_row(
            "SELECT COUNT(*) FROM pragma_table_info('entries') WHERE name='width'",
            [],
            |row| row.get::<_, i64>(0).map(|count| count > 0),
        )?;
        if !has_width {
            db.execute("ALTER TABLE entries ADD COLUMN width INTEGER", [])?;
            db.execute("ALTER TABLE entries ADD COLUMN height INTEGER", [])?;
        }
        Ok(())
    }

    /// Main daemon loop: spawn the IPC server, then alternate between
    /// clipboard polling and dispatching received commands.
    pub async fn run_daemon(&mut self) -> Result<()> {
        let (tx, rx) = mpsc::channel(10);
        self.command_rx = Some(rx);
        #[cfg(unix)]
        let sock_path = std::env::temp_dir().join("nocb.sock");
#[cfg(windows)] let sock_path = PathBuf::from(r"\\.\pipe\nocb"); let tx_clone = tx.clone(); let sock_path_clone = sock_path.clone(); let ipc_handle = tokio::spawn(async move { let result = Self::ipc_server(tx_clone, sock_path_clone.clone()).await; #[cfg(unix)] let _ = std::fs::remove_file(&sock_path_clone); result }); let mut interval = tokio::time::interval(Duration::from_millis(POLL_INTERVAL_MS)); let mut cleanup_counter = 0u64; loop { tokio::select! { _ = interval.tick() => { if let Err(e) = self.poll_clipboard().await { eprintln!("Poll error: {}", e); } cleanup_counter += 1; // Run cleanup every ~100 seconds if cleanup_counter % 1000 == 0 { let _ = self.cleanup_old_entries(); } } cmd = async { self.command_rx.as_mut()?.recv().await } => { if let Some(cmd) = cmd { match cmd { Command::Copy(selection) => { if let Err(e) = self.copy_selection(&selection).await { eprintln!("Copy error: {}", e); } } Command::Exit => break, Command::Clear => { if let Err(e) = self.clear() { eprintln!("Clear error: {}", e); } } Command::Prune(hashes) => { if let Err(e) = self.prune(&hashes) { eprintln!("Prune error: {}", e); } } } } } } } #[cfg(unix)] let _ = std::fs::remove_file(&sock_path); ipc_handle.abort(); Ok(()) } async fn handle_ipc_command(cmd: &str, tx: &mpsc::Sender) -> Result<()> { let cmd = cmd.trim(); match cmd { cmd if cmd.starts_with("COPY:") => { let selection = cmd[5..].to_string(); tx.send(Command::Copy(selection)).await?; } cmd if cmd.starts_with("PRUNE:") => { let hashes_str = cmd[6..].to_string(); let hashes: Vec = hashes_str .split(',') .map(|s| s.trim().to_string()) .collect(); tx.send(Command::Prune(hashes)).await?; } "CLEAR" => { tx.send(Command::Clear).await?; } _ => { eprintln!("Unknown command: {}", cmd); } } Ok(()) } #[cfg(all(unix, target_os = "linux"))] fn verify_peer_uid(stream: &tokio::net::UnixStream) -> bool { match stream.peer_cred() { Ok(cred) => cred.uid() == unsafe { libc::getuid() }, Err(_) => false, } } #[cfg(all(unix, not(target_os = 
"linux")))] fn verify_peer_uid(_stream: &tokio::net::UnixStream) -> bool { true } async fn ipc_server(tx: mpsc::Sender, sock_path: PathBuf) -> Result<()> { let _ = std::fs::remove_file(&sock_path); #[cfg(unix)] { use tokio::net::UnixListener; let listener = UnixListener::bind(&sock_path)?; use std::os::unix::fs::PermissionsExt; let perms = std::fs::Permissions::from_mode(0o700); std::fs::set_permissions(&sock_path, perms)?; loop { let (mut stream, _addr) = listener.accept().await?; #[cfg(target_os = "linux")] if !Self::verify_peer_uid(&stream) { continue; } let tx = tx.clone(); tokio::spawn(async move { use tokio::io::AsyncReadExt; let mut buf = vec![0u8; MAX_IPC_MESSAGE_SIZE]; match stream.read(&mut buf).await { Ok(n) if n > IPC_MAGIC.len() => { if &buf[..IPC_MAGIC.len()] != IPC_MAGIC { return; } if let Ok(cmd) = String::from_utf8(buf[IPC_MAGIC.len()..n].to_vec()) { let _ = Self::handle_ipc_command(&cmd, &tx).await; } } _ => {} } }); } } #[cfg(windows)] { use tokio::io::AsyncReadExt; use tokio::net::windows::named_pipe::{PipeMode, ServerOptions}; let pipe_name = r"\\.\pipe\nocb"; loop { let mut server = ServerOptions::new() .first_pipe_instance(true) .pipe_mode(PipeMode::Message) .create(pipe_name)?; server.connect().await?; let tx = tx.clone(); tokio::spawn(async move { let mut buf = vec![0u8; MAX_IPC_MESSAGE_SIZE]; match server.read(&mut buf).await { Ok(n) if n > IPC_MAGIC.len() => { if &buf[..IPC_MAGIC.len()] != IPC_MAGIC { return; } if let Ok(cmd) = String::from_utf8(buf[IPC_MAGIC.len()..n].to_vec()) { let _ = Self::handle_ipc_command(&cmd, &tx).await; } } _ => {} } }); } } } pub async fn send_copy_command(selection: &str) -> Result<()> { Self::send_command(&format!("COPY:{}", selection)).await } pub async fn send_command(cmd: &str) -> Result<()> { use tokio::io::AsyncWriteExt; use tokio::time::timeout; #[cfg(unix)] { use tokio::net::UnixStream; let sock_path = std::env::temp_dir().join("nocb.sock"); let mut stream = timeout(Duration::from_secs(2), 
UnixStream::connect(&sock_path)) .await .context("Connection timeout")? .context("Failed to connect to daemon")?; let mut msg = Vec::with_capacity(IPC_MAGIC.len() + cmd.len()); msg.extend_from_slice(IPC_MAGIC); msg.extend_from_slice(cmd.as_bytes()); stream.write_all(&msg).await?; stream.shutdown().await?; } #[cfg(windows)] { use tokio::net::windows::named_pipe::ClientOptions; let pipe_name = r"\\.\pipe\nocb"; let mut client = ClientOptions::new().open(pipe_name)?; let mut msg = Vec::with_capacity(IPC_MAGIC.len() + cmd.len()); msg.extend_from_slice(IPC_MAGIC); msg.extend_from_slice(cmd.as_bytes()); timeout(Duration::from_secs(2), client.write_all(&msg)) .await .context("Write timeout")? .context("Failed to write to pipe")?; } Ok(()) } async fn poll_clipboard(&mut self) -> Result<()> { let content = { match self.clipboard.try_write() { Some(mut clipboard) => { if let Ok(text) = clipboard.get_text() { Some(ClipboardContent::Text(text)) } else if let Ok(img) = clipboard.get_image() { Some(ClipboardContent::Image(img)) } else { None } } None => return Ok(()), } }; if let Some(content) = content { let entry = match content { ClipboardContent::Text(text) => { if text.trim().is_empty() && self.config.trim_whitespace { return Ok(()); } let text = if self.config.trim_whitespace { text.trim().to_string() } else { text }; let hash = self.hash_data(text.as_bytes()); if self.last_clipboard_hash.as_ref() == Some(&hash) { return Ok(()); } if self.entry_exists(&hash)? 
{ self.last_clipboard_hash = Some(hash); return Ok(()); } let size = text.len(); if size > MAX_CLIPBOARD_SIZE { return Ok(()); } let content = if size <= MAX_INLINE_SIZE { ContentType::Text(text) } else { let compressed = size > self.config.compress_threshold; let stored_hash = self.store_text_blob(&hash, &text, compressed)?; ContentType::TextFile { hash: stored_hash, compressed, } }; Some(Entry::new(content, "unknown".to_string(), hash, size)) } ClipboardContent::Image(img) => { let (width, height) = (img.width as u32, img.height as u32); let data = self.image_to_png(&img)?; let hash = self.hash_data(&data); if self.last_clipboard_hash.as_ref() == Some(&hash) { return Ok(()); } if self.entry_exists(&hash)? { self.last_clipboard_hash = Some(hash); return Ok(()); } if data.len() > MAX_CLIPBOARD_SIZE { return Ok(()); } let stored_hash = self.store_blob(&hash, &data)?; let content = ContentType::Image { mime: "image/png".to_string(), hash: stored_hash.clone(), width, height, }; Some(Entry::new( content, "unknown".to_string(), stored_hash, data.len(), )) } }; if let Some(entry) = entry { self.last_clipboard_hash = Some(entry.hash.clone()); self.add_entry(entry).await?; } } else { self.last_clipboard_hash = None; } Ok(()) } fn entry_exists(&self, hash: &str) -> Result { if self.cache.contains(hash) { let _ = self.db.execute( "UPDATE entries SET timestamp = ?1 WHERE hash = ?2", params![ SystemTime::now() .duration_since(UNIX_EPOCH) .unwrap() .as_secs() as i64, hash ], ); return Ok(true); } let exists: bool = self .db .query_row( "SELECT 1 FROM entries WHERE hash = ?1", params![hash], |_| Ok(true), ) .optional()? 
.unwrap_or(false); Ok(exists) } fn image_to_png(&self, img: &ImageData) -> Result> { use image::{ImageBuffer, Rgba}; let width = img.width as u32; let height = img.height as u32; let img_buffer = ImageBuffer::, Vec>::from_raw(width, height, img.bytes.to_vec()) .context("Failed to create image buffer")?; let mut png_data = Vec::new(); let encoder = image::codecs::png::PngEncoder::new(&mut png_data); image::ImageEncoder::write_image( encoder, &img_buffer, width, height, image::ExtendedColorType::Rgba8, )?; Ok(png_data) } fn hash_data(&self, data: &[u8]) -> String { let mut hasher = Hasher::new(); hasher.update(data); hasher.finalize().to_hex().to_string() } fn store_blob(&self, hash: &str, data: &[u8]) -> Result { if hash.contains('/') || hash.contains('\\') || hash.contains("..") { anyhow::bail!("Invalid hash"); } let path = self.config.cache_dir.join("blobs").join(hash); if !path.exists() { fs::write(&path, data)?; } Ok(hash.to_string()) } fn store_text_blob(&self, hash: &str, text: &str, compress: bool) -> Result { let filename = if compress { format!("{}.txt.zst", hash) } else { format!("{}.txt", hash) }; let path = self.config.cache_dir.join("blobs").join(&filename); if !path.exists() { if compress { use zstd::stream::write::Encoder; let file = fs::File::create(&path)?; let mut encoder = Encoder::new(file, 3)?; encoder.write_all(text.as_bytes())?; encoder.finish()?; } else { fs::write(&path, text)?; } } Ok(hash.to_string()) } async fn add_entry(&mut self, entry: Entry) -> Result<()> { if self .config .blacklist .iter() .any(|app| entry.app_name.contains(app)) { return Ok(()); } let timestamp = entry.timestamp as i64; let cached_entry = match &entry.content { ContentType::Text(text) => { self.db.execute( "INSERT OR REPLACE INTO entries (hash, timestamp, app_name, content_type, inline_text, size_bytes) VALUES (?1, ?2, ?3, 'text', ?4, ?5)", params![entry.hash, timestamp, entry.app_name, text, entry.size_bytes as i64], )?; CachedEntry::text(text.clone(), timestamp, 
entry.size_bytes)
            }
            ContentType::TextFile { hash, compressed } => {
                // Blob filename mirrors the naming scheme used by store_text_blob.
                let file_path = if *compressed {
                    format!("{}.txt.zst", hash)
                } else {
                    format!("{}.txt", hash)
                };
                self.db.execute(
                    "INSERT OR REPLACE INTO entries (hash, timestamp, app_name, content_type, file_path, size_bytes, compressed) VALUES (?1, ?2, ?3, 'text_file', ?4, ?5, ?6)",
                    params![entry.hash, timestamp, entry.app_name, file_path, entry.size_bytes as i64, *compressed as i64],
                )?;
                CachedEntry::text_file(file_path, *compressed, timestamp, entry.size_bytes)
            }
            ContentType::Image {
                mime,
                hash,
                width,
                height,
            } => {
                self.db.execute(
                    "INSERT OR REPLACE INTO entries (hash, timestamp, app_name, content_type, file_path, mime_type, size_bytes, width, height) VALUES (?1, ?2, ?3, 'image', ?4, ?5, ?6, ?7, ?8)",
                    params![entry.hash, timestamp, entry.app_name, hash, mime, entry.size_bytes as i64, *width as i64, *height as i64],
                )?;
                CachedEntry::image(
                    hash.clone(),
                    mime.clone(),
                    *width,
                    *height,
                    timestamp,
                    entry.size_bytes,
                )
            }
        };
        self.cache.put(entry.hash.clone(), cached_entry);
        Ok(())
    }

    /// Drop entries exceeding the configured count cap or age limit.
    fn cleanup_old_entries(&mut self) -> Result<()> {
        // Hashes past the newest `max_entries` rows.
        let mut stmt = self.db.prepare(
            "SELECT hash FROM entries WHERE id NOT IN ( SELECT id FROM entries ORDER BY timestamp DESC LIMIT ?1 )",
        )?;
        let hashes_to_delete: Vec<String> = stmt
            .query_map(params![self.config.max_entries as i64], |row| row.get(0))?
            .collect::<Result<Vec<String>, _>>()?;
        drop(stmt);
        // Hashes older than `max_age_days`.
        let cutoff = SystemTime::now().duration_since(UNIX_EPOCH)?.as_secs()
            - (self.config.max_age_days as u64 * 86400);
        let mut stmt = self
            .db
            .prepare("SELECT hash FROM entries WHERE timestamp < ?1")?;
        let old_hashes: Vec<String> = stmt
            .query_map(params![cutoff as i64], |row| row.get(0))?
.collect::, _>>()?; drop(stmt); // Delete all collected hashes for hash in hashes_to_delete.into_iter().chain(old_hashes) { self.delete_entry(&hash)?; } // VACUUM occasionally (every 100 cleanups) static VACUUM_COUNTER: std::sync::atomic::AtomicU64 = std::sync::atomic::AtomicU64::new(0); if VACUUM_COUNTER.fetch_add(1, std::sync::atomic::Ordering::Relaxed) % 100 == 0 { let _ = self.db.execute("VACUUM", []); } Ok(()) } fn delete_entry(&mut self, hash: &str) -> Result<()> { let file_path: Option = self .db .query_row( "SELECT file_path FROM entries WHERE hash = ?1", params![hash], |row| row.get(0), ) .optional()?; self.db .execute("DELETE FROM entries WHERE hash = ?1", params![hash])?; self.cache.remove(hash); if let Some(file_path) = file_path { let path = self.config.cache_dir.join("blobs").join(file_path); let _ = fs::remove_file(path); } else { // backward compatibility let path = self.config.cache_dir.join("blobs").join(hash); let _ = fs::remove_file(path); } Ok(()) } pub fn read_text_preview(&self, file_path: &str, max_len: usize) -> Option { let path = self.config.cache_dir.join("blobs").join(file_path); if file_path.ends_with(".zst") { self.read_compressed_preview(&path, max_len) } else { self.read_plain_preview(&path, max_len) } } pub fn read_compressed_preview(&self, path: &Path, max_len: usize) -> Option { use zstd::stream::read::Decoder; let file = fs::File::open(path).ok()?; let mut decoder = Decoder::new(file).ok()?; let mut buffer = vec![0u8; max_len * 4]; let n = std::io::Read::read(&mut decoder, &mut buffer).ok()?; String::from_utf8(buffer[..n].to_vec()).ok() } pub fn read_plain_preview(&self, path: &Path, max_len: usize) -> Option { let data = fs::read(path).ok()?; let len = data.len().min(max_len * 4); String::from_utf8(data[..len].to_vec()).ok() } pub fn format_time_ago(&self, timestamp: i64) -> String { let now = SystemTime::now() .duration_since(UNIX_EPOCH) .unwrap() .as_secs(); let ago_secs = now.saturating_sub(timestamp as u64); match ago_secs 
{
            0..=59 => format!("{}s", ago_secs),
            60..=3599 => format!("{}m", ago_secs / 60),
            3600..=86399 => format!("{}h", ago_secs / 3600),
            _ => format!("{}d", ago_secs / 86400),
        }
    }

    /// Compact size string: B / K / M (integer division, floor).
    pub fn format_size(&self, bytes: i64) -> String {
        match bytes {
            0..=1023 => format!("{}B", bytes),
            1024..=1048575 => format!("{}K", bytes / 1024),
            _ => format!("{}M", bytes / (1024 * 1024)),
        }
    }

    /// Print the clipboard history for rofi: static config entries first,
    /// then cached entries, then (if the quota is unfilled) DB rows that were
    /// not already printed from the cache.
    pub fn print_history(&self) -> Result<()> {
        for entry in &self.config.static_entries {
            println!("{}", entry.replace('\n', " "));
        }
        let _cached_hashes = self.cache.get_hashes();
        // Track what the cache pass emitted so the DB pass can exclude it.
        let mut printed_hashes = std::collections::HashSet::new();
        let mut printed_count = 0;
        let cached_entries = self.cache.iter_sorted();
        for (hash, cached) in cached_entries.iter().take(self.config.max_print_entries) {
            if printed_count >= self.config.max_print_entries {
                break;
            }
            self.print_cached_entry(hash, cached)?;
            printed_hashes.insert(hash.clone());
            printed_count += 1;
        }
        if printed_count < self.config.max_print_entries {
            // Exclusion list is passed as a JSON array and expanded via json_each.
            let mut stmt = self.db.prepare(
                "SELECT hash, app_name, content_type, inline_text, file_path, mime_type, size_bytes, timestamp, width, height FROM entries WHERE hash NOT IN (SELECT value FROM json_each(?1)) ORDER BY timestamp DESC LIMIT ?2"
            )?;
            let excluded_json =
                serde_json::to_string(&printed_hashes.iter().collect::<Vec<_>>())?;
            let remaining_limit = self.config.max_print_entries - printed_count;
            let rows = stmt.query_map(params![excluded_json, remaining_limit as i64], |row| {
                let hash: String = row.get(0)?;
                let app_name: String = row.get(1)?;
                let content_type: String = row.get(2)?;
                let inline_text: Option<String> = row.get(3)?;
                let file_path: Option<String> = row.get(4)?;
                let mime_type: Option<String> = row.get(5)?;
                let size_bytes: i64 = row.get(6)?;
                let timestamp: i64 = row.get(7)?;
                let width: Option<i64> = row.get(8)?;
                let height: Option<i64> = row.get(9)?;
                Ok((
                    hash,
                    app_name,
                    content_type,
                    inline_text,
                    file_path,
                    mime_type,
                    size_bytes,
                    timestamp,
                    width,
                    height,
                ))
            })?;
            for row in rows {
                let (
                    hash,
                    _app_name,
                    content_type,
                    inline_text,
                    file_path,
                    mime_type,
                    size_bytes,
                    timestamp,
                    width,
                    height,
                ) = row?;
                let time_str = self.format_time_ago(timestamp);
                let hash_prefix = &hash[..HASH_PREFIX_LEN.min(hash.len())];
                let size_str = self.format_size(size_bytes);
                match content_type.as_str() {
                    "text" => {
                        if let Some(text) = inline_text {
                            // 80-column budget minus fixed decoration widths.
                            let available_chars = 80 - time_str.len() - 1 - 9 - 1;
                            let display = self.truncate_to_fit(&text, available_chars);
                            let line = format!("{} {}", time_str, display);
                            println!("{:<70} #{}", line, hash_prefix);
                        }
                    }
                    "text_file" => {
                        if let Some(fp) = file_path {
                            let available_chars =
                                80 - time_str.len() - 1 - 9 - 1 - size_str.len() - 3;
                            let preview = self
                                .read_text_preview(&fp, available_chars * 4)
                                .map(|p| self.truncate_to_fit(&p, available_chars))
                                .filter(|p| !p.is_empty());
                            if let Some(display) = preview {
                                println!(
                                    "{} {} [{}] #{}",
                                    time_str, display, size_str, hash_prefix
                                );
                            } else {
                                let line = format!("{} [Text: {}]", time_str, size_str);
                                println!("{:<70} #{}", line, hash_prefix);
                            }
                        }
                    }
                    "image" => {
                        let mime_short = mime_type
                            .as_ref()
                            .map(|m| m.split('/').last().unwrap_or("?"))
                            .unwrap_or("?");
                        if let (Some(w), Some(h)) = (width, height) {
                            let dims_str = format!("{}x{}px", w, h);
                            let available = 80
                                - time_str.len()
                                - 1
                                - 9
                                - 7
                                - mime_short.len()
                                - size_str.len()
                                - 2;
                            if dims_str.len() <= available {
                                println!(
                                    "{} [IMG:{} {} {}] #{}",
                                    time_str, dims_str, mime_short, size_str, hash_prefix
                                );
                            } else {
                                println!(
                                    "{} [IMG:{} {}] #{}",
                                    time_str, mime_short, size_str, hash_prefix
                                );
                            }
                        } else {
                            println!(
                                "{} [IMG:{} {}] #{}",
                                time_str, mime_short, size_str, hash_prefix
                            );
                        }
                    }
                    _ => {}
                }
            }
        }
        Ok(())
    }

    /// Print one cached entry in the same layout as the DB path of
    /// print_history.
    fn print_cached_entry(&self, hash: &str, cached: &CachedEntry) -> Result<()> {
        let time_str = self.format_time_ago(cached.timestamp);
        let hash_prefix = &hash[..HASH_PREFIX_LEN.min(hash.len())];
        let size_str =
self.format_size(cached.size_bytes as i64);
        match cached.content_type.as_str() {
            "text" => {
                if let Some(text) = &cached.inline_text {
                    // 80-column budget minus fixed decoration widths.
                    let available_chars = 80 - time_str.len() - 1 - 9 - 1;
                    let display = self.truncate_to_fit(text, available_chars);
                    let line = format!("{} {}", time_str, display);
                    println!("{:<70} #{}", line, hash_prefix);
                }
            }
            "text_file" => {
                if let Some(fp) = &cached.file_path {
                    let available_chars = 80 - time_str.len() - 1 - 9 - 1 - size_str.len() - 3;
                    let preview = self
                        .read_text_preview(fp, available_chars * 4)
                        .map(|p| self.truncate_to_fit(&p, available_chars))
                        .filter(|p| !p.is_empty());
                    if let Some(display) = preview {
                        let line = format!("{} {} [{}]", time_str, display, size_str);
                        println!("{:<70} #{}", line, hash_prefix);
                    } else {
                        let line = format!("{} [Text: {}]", time_str, size_str);
                        println!("{:<70} #{}", line, hash_prefix);
                    }
                }
            }
            "image" => {
                let mime_short = cached
                    .mime_type
                    .as_ref()
                    .map(|m| m.split('/').last().unwrap_or("?"))
                    .unwrap_or("?");
                if let (Some(w), Some(h)) = (cached.width, cached.height) {
                    let dims_str = format!("{}x{}px", w, h);
                    let available =
                        80 - time_str.len() - 1 - 9 - 7 - mime_short.len() - size_str.len() - 2;
                    if dims_str.len() <= available {
                        println!(
                            "{} [IMG:{} {} {}] #{}",
                            time_str, dims_str, mime_short, size_str, hash_prefix
                        );
                    } else {
                        println!(
                            "{} [IMG:{} {}] #{}",
                            time_str, mime_short, size_str, hash_prefix
                        );
                    }
                } else {
                    println!(
                        "{} [IMG:{} {}] #{}",
                        time_str, mime_short, size_str, hash_prefix
                    );
                }
            }
            _ => {}
        }
        Ok(())
    }

    /// Flatten whitespace and hard-truncate at a UTF-8 character boundary,
    /// appending an ellipsis when the text was cut.
    fn truncate_to_fit(&self, text: &str, max_chars: usize) -> String {
        let text = text.replace('\n', " ").replace('\t', " ");
        if text.len() <= max_chars {
            text
        } else {
            let mut end = max_chars.saturating_sub(1).min(text.len());
            // Back off until the cut lands on a char boundary.
            while !text.is_char_boundary(end) && end > 0 {
                end -= 1;
            }
            format!("{}…", &text[..end])
        }
    }

    /// Copy a rofi selection line. A trailing "#hash" reference is resolved
    /// against the history first; otherwise the literal text is copied.
    async fn copy_selection(&mut self, selection: &str) -> Result<()> {
        // extract hash from #hash format anywhere in string
        if let Some(hash_pos) = selection.rfind('#') {
            let
hash_start = hash_pos + 1;
            let hash_end = selection[hash_start..]
                .find(|c: char| c.is_whitespace())
                .map(|i| hash_start + i)
                .unwrap_or(selection.len());
            let hash = &selection[hash_start..hash_end];
            // Only treat it as a hash reference when plausibly long enough.
            if !hash.is_empty() && hash.len() >= 8 {
                match self.copy_by_hash(hash).await {
                    Ok(_) => return Ok(()),
                    Err(_) => {
                        // fall through to the literal copy below
                    }
                }
            }
        }
        // copy as literal text
        let mut clipboard = self.clipboard.write();
        clipboard.set_text(selection.to_string())?;
        Ok(())
    }

    /// Restore an entry to the clipboard, identified by a prefix of its hash.
    /// Checks the in-memory cache before falling back to SQLite.
    async fn copy_by_hash(&self, hash_prefix: &str) -> Result<()> {
        // cache lookup first for performance
        for (full_hash, cached) in self.cache.iter_sorted() {
            if full_hash.starts_with(hash_prefix) {
                return match cached.content_type.as_str() {
                    "text" => {
                        if let Some(text) = &cached.inline_text {
                            let mut clipboard = self.clipboard.write();
                            clipboard.set_text(text.clone())?;
                            Ok(())
                        } else {
                            anyhow::bail!("Missing text content")
                        }
                    }
                    "text_file" => {
                        if let Some(fp) = &cached.file_path {
                            let path = self.config.cache_dir.join("blobs").join(fp);
                            let mut text = if cached.compressed {
                                use zstd::stream::read::Decoder;
                                let file = fs::File::open(path)?;
                                let mut decoder = Decoder::new(file)?;
                                let mut text = String::new();
                                decoder.read_to_string(&mut text)?;
                                text
                            } else {
                                fs::read_to_string(path)?
                            };
                            let mut clipboard = self.clipboard.write();
                            clipboard.set_text(text.clone())?;
                            // Wipe the plaintext buffer once handed off.
                            text.zeroize();
                            Ok(())
                        } else {
                            anyhow::bail!("Missing file path")
                        }
                    }
                    "image" => {
                        if let Some(fp) = &cached.file_path {
                            let path = self.config.cache_dir.join("blobs").join(fp);
                            let data = fs::read(&path)?;
                            let img = image::load_from_memory(&data)?;
                            let rgba = img.to_rgba8();
                            let (width, height) =
                                (rgba.width() as usize, rgba.height() as usize);
                            let img_data = ImageData {
                                width,
                                height,
                                bytes: rgba.into_raw().into(),
                            };
                            let mut clipboard = self.clipboard.write();
                            clipboard.set_image(img_data)?;
                            Ok(())
                        } else {
                            anyhow::bail!("Missing image path")
                        }
                    }
                    _ => anyhow::bail!("Unknown content type"),
                };
            }
        }
        // cache miss, query database
        let row: Option<(String, Option<String>, Option<String>, Option<bool>)> = self
            .db
            .query_row(
                "SELECT content_type, inline_text, file_path, compressed FROM entries WHERE hash LIKE ?1 || '%' ORDER BY timestamp DESC LIMIT 1",
                params![hash_prefix],
                |row| {
                    Ok((
                        row.get(0)?,
                        row.get(1)?,
                        row.get(2)?,
                        row.get::<_, Option<i64>>(3)?.map(|v| v != 0),
                    ))
                },
            )
            .optional()?;
        if let Some((content_type, inline_text, file_path, compressed)) = row {
            match content_type.as_str() {
                "text" => {
                    if let Some(text) = inline_text {
                        let mut clipboard = self.clipboard.write();
                        clipboard.set_text(text)?;
                    }
                }
                "text_file" => {
                    if let Some(fp) = file_path {
                        let path = self.config.cache_dir.join("blobs").join(&fp);
                        let mut text = if compressed.unwrap_or(false) {
                            use zstd::stream::read::Decoder;
                            let file = fs::File::open(path)?;
                            let mut decoder = Decoder::new(file)?;
                            let mut text = String::new();
                            decoder.read_to_string(&mut text)?;
                            text
                        } else {
                            fs::read_to_string(path)?
};
                        let mut clipboard = self.clipboard.write();
                        clipboard.set_text(text.clone())?;
                        // Wipe the plaintext buffer once handed off.
                        text.zeroize();
                    }
                }
                "image" => {
                    if let Some(fp) = file_path {
                        let path = self.config.cache_dir.join("blobs").join(&fp);
                        let data = fs::read(&path)?;
                        let img = image::load_from_memory(&data)?;
                        let rgba = img.to_rgba8();
                        let (width, height) = (rgba.width() as usize, rgba.height() as usize);
                        let img_data = ImageData {
                            width,
                            height,
                            bytes: rgba.into_raw().into(),
                        };
                        let mut clipboard = self.clipboard.write();
                        clipboard.set_image(img_data)?;
                    }
                }
                _ => anyhow::bail!("Unknown content type"),
            }
        } else {
            anyhow::bail!("Entry not found for hash: {}", hash_prefix);
        }
        Ok(())
    }

    /// Delete all history: cache, DB rows, blob files, and the live clipboard.
    pub fn clear(&mut self) -> Result<()> {
        self.last_clipboard_hash = None;
        self.cache.clear();
        {
            let mut clipboard = self.clipboard.write();
            let _ = clipboard.set_text("");
        }
        let mut stmt = self.db.prepare("SELECT hash FROM entries")?;
        let all_hashes: Vec<String> = stmt
            .query_map([], |row| row.get(0))?
            .collect::<Result<Vec<String>, _>>()?;
        drop(stmt);
        for hash in &all_hashes {
            self.delete_entry(hash)?;
        }
        self.db.execute("VACUUM", [])?;
        // Sweep any orphaned blob files not referenced by the DB.
        let blobs_dir = self.config.cache_dir.join("blobs");
        if blobs_dir.exists() {
            for entry in fs::read_dir(&blobs_dir)?
            {
                let entry = entry?;
                let path = entry.path();
                if path.is_file() {
                    let _ = fs::remove_file(path);
                }
            }
        }
        Ok(())
    }

    /// Clear the history and remove the entire cache directory.
    pub fn purge_all(&mut self) -> Result<()> {
        self.clear()?;
        if self.config.cache_dir.exists() {
            fs::remove_dir_all(&self.config.cache_dir)?;
        }
        Ok(())
    }

    /// Delete a specific set of entries by full hash.
    pub fn prune(&mut self, hashes: &[String]) -> Result<()> {
        for hash in hashes {
            self.delete_entry(hash)?;
        }
        Ok(())
    }

    /// Fetch the newest `limit` entries as typed `Entry` values.
    pub fn get_history(&self, limit: usize) -> Result<Vec<Entry>> {
        let mut stmt = self.db.prepare(
            "SELECT id, hash, timestamp, app_name, content_type, inline_text, file_path, mime_type, size_bytes, compressed, width, height FROM entries ORDER BY timestamp DESC LIMIT ?1"
        )?;
        let entries = stmt
            .query_map([limit as i64], |row| {
                let id: i64 = row.get(0)?;
                let hash: String = row.get(1)?;
                let timestamp: i64 = row.get(2)?;
                let app_name: String = row.get(3)?;
                let content_type: String = row.get(4)?;
                let inline_text: Option<String> = row.get(5)?;
                let _file_path: Option<String> = row.get(6)?;
                let mime_type: Option<String> = row.get(7)?;
                let size_bytes: i64 = row.get(8)?;
                let compressed: Option<bool> =
                    row.get::<_, Option<i64>>(9)?.map(|v| v != 0);
                let width: Option<i64> = row.get(10)?;
                let height: Option<i64> = row.get(11)?;
                let content = match content_type.as_str() {
                    "text" => ContentType::Text(inline_text.unwrap_or_default()),
                    "text_file" => ContentType::TextFile {
                        hash: hash.clone(),
                        compressed: compressed.unwrap_or(false),
                    },
                    "image" => ContentType::Image {
                        mime: mime_type.unwrap_or_else(|| "image/unknown".to_string()),
                        hash: hash.clone(),
                        width: width.unwrap_or(0) as u32,
                        height: height.unwrap_or(0) as u32,
                    },
                    _ => ContentType::Text("Unknown".to_string()),
                };
                Ok(Entry {
                    id: Some(id),
                    hash,
                    timestamp: timestamp as u64,
                    app_name,
                    content,
                    size_bytes: size_bytes as usize,
                })
            })?
            .collect::<Result<Vec<_>, _>>()?;
        Ok(entries)
    }

    /// Fetch the newest `limit` entries as flat display tuples:
    /// (hash, display content, content_type, size_bytes, timestamp).
    pub fn get_entries(&self, limit: usize) -> Result<Vec<(String, String, String, i64, i64)>> {
        let mut stmt = self.db.prepare(
            "SELECT hash, content_type, inline_text, file_path, size_bytes, timestamp FROM entries ORDER BY timestamp DESC LIMIT ?1",
        )?;
        let rows = stmt
            .query_map([limit as i64], |row| {
                let hash: String = row.get(0)?;
                let content_type: String = row.get(1)?;
                let inline_text: Option<String> = row.get(2)?;
                let file_path: Option<String> = row.get(3)?;
                let size_bytes: i64 = row.get(4)?;
                let timestamp: i64 = row.get(5)?;
                let content = match content_type.as_str() {
                    "text" => inline_text.unwrap_or_else(|| "[Empty]".to_string()),
                    "text_file" => {
                        if let Some(fp) = file_path {
                            self.read_text_preview(&fp, self.config.max_display_length)
                                .unwrap_or_else(|| {
                                    format!("[Text: {}]", self.format_size(size_bytes))
                                })
                        } else {
                            format!("[Text: {}]", self.format_size(size_bytes))
                        }
                    }
                    "image" => format!("[Image: {}]", self.format_size(size_bytes)),
                    _ => "[Unknown]".to_string(),
                };
                Ok((hash, content, content_type, size_bytes, timestamp))
            })?
.collect::<Result<Vec<_>, _>>()?;
        Ok(rows)
    }
}

=== core/src/main.rs ===
use anyhow::{Context, Result};
use clap::{Parser, Subcommand};
use nocb::{ClipboardManager, Config};
use std::path::PathBuf;
use tokio::signal;

/// CLI entry for nocb: daemon, print, copy, clear, prune.
#[derive(Parser)]
#[command(name = "nocb")]
#[command(version = "1.1.5")]
#[command(about = "nearly optimal clipboard manager")]
struct Cli {
    #[command(subcommand)]
    command: Commands,
}

#[derive(Subcommand)]
enum Commands {
    /// Run clipboard daemon
    Daemon,
    /// Print clipboard history for rofi
    Print,
    /// Copy selection to clipboard (reads from stdin if no args)
    Copy {
        /// Selection text or image reference to copy
        #[arg(trailing_var_arg = true)]
        selection: Vec<String>,
    },
    /// Clear clipboard history
    Clear,
    /// Remove entries by hash list
    Prune {
        /// File containing hash list or direct hashes
        #[arg(value_name = "HASHES")]
        input: Vec<String>,
    },
}

#[tokio::main]
async fn main() -> Result<()> {
    let cli = Cli::parse();

    // platform-specific display check
    #[cfg(target_os = "linux")]
    {
        if std::env::var("DISPLAY").is_err() && std::env::var("WAYLAND_DISPLAY").is_err() {
            eprintln!("Error: No display server available (neither X11 nor Wayland)");
            std::process::exit(1);
        }
    }
    #[cfg(not(target_os = "linux"))]
    {
        // arboard handles clipboard natively on macos/windows
    }

    let config = Config::load().context("Failed to load configuration")?;

    match cli.command {
        Commands::Daemon => {
            let mut manager = ClipboardManager::new(config).await?;
            // Run until the daemon fails or Ctrl-C arrives.
            tokio::select!
            {
                result = manager.run_daemon() => {
                    if let Err(e) = result {
                        eprintln!("Daemon error: {}", e);
                        std::process::exit(1);
                    }
                }
                _ = signal::ctrl_c() => {
                    println!("\nShutting down...");
                }
            }
        }
        Commands::Print => {
            let manager = ClipboardManager::new(config).await?;
            manager.print_history()?;
        }
        Commands::Copy { selection } => {
            // No args means the selection arrives on stdin.
            let selection = if selection.is_empty() {
                use std::io::{self, Read};
                let mut buffer = String::new();
                io::stdin().read_to_string(&mut buffer)?;
                buffer.trim().to_string()
            } else {
                selection.join(" ")
            };
            if selection.is_empty() {
                return Ok(());
            }
            ClipboardManager::send_copy_command(&selection).await?;
        }
        Commands::Clear => {
            nocb::ClipboardManager::send_command("CLEAR").await?;
            println!("Clear command sent to daemon");
        }
        Commands::Prune { input } => {
            // A single existing path is treated as a file of hashes,
            // otherwise the args themselves are the hashes.
            let hashes = if input.len() == 1 && PathBuf::from(&input[0]).exists() {
                let content =
                    std::fs::read_to_string(&input[0]).context("Failed to read hash file")?;
                content.lines().map(|s| s.trim().to_string()).collect()
            } else {
                input
            };
            nocb::ClipboardManager::send_command(&format!("PRUNE:{}", hashes.join(","))).await?;
            println!("Prune command sent to daemon for {} entries", hashes.len());
        }
    }
    Ok(())
}

=== default.nix ===
{ lib
, rustPlatform
, fetchFromGitHub
, pkg-config
, sqlite
, libX11
, libxcb
}:

rustPlatform.buildRustPackage rec {
  pname = "nocb";
  version = "0.2.0";

  src = fetchFromGitHub {
    owner = "hitchhooker";
    repo = "nocb";
    rev = "v${version}";
    hash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=";
  };

  cargoHash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=";

  nativeBuildInputs = [ pkg-config ];
  buildInputs = [ sqlite libX11 libxcb ];

  meta = with lib; {
    description = "X11 clipboard manager with compression and blob storage";
    homepage = "https://github.com/hitchhooker/nocb";
    license = licenses.mit;
    maintainers = with maintainers; [ ];
    platforms = platforms.linux;
    mainProgram = "nocb";
  };
}

=== flake.nix ===
{
  description = "X11 clipboard manager with compression and blob storage";

  inputs
= {
    nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
    flake-utils.url = "github:numtide/flake-utils";
  };

  outputs = { self, nixpkgs, flake-utils, ... }:
    flake-utils.lib.eachDefaultSystem (system:
      let
        pkgs = import nixpkgs { inherit system; };
      in
      with pkgs; {
        packages.default = rustPlatform.buildRustPackage rec {
          pname = "nocb";
          version = "0.2.0";
          src = ./.;
          cargoLock = { lockFile = ./Cargo.lock; };
          nativeBuildInputs = [ pkg-config ];
          buildInputs = [ sqlite libX11 libxcb ];
          meta = {
            description = "X11 clipboard manager with compression and blob storage";
            homepage = "https://github.com/hitchhooker/nocb";
            license = lib.licenses.mit;
            mainProgram = "nocb";
          };
        };
        devShells.default = mkShell {
          buildInputs = [ rustc cargo pkg-config sqlite libX11 libxcb ];
        };
      });
}

=== nocb.service ===
[Unit]
Description=NOCB Clipboard Manager
After=graphical-session.target

[Service]
Type=simple
# NOTE(review): hard-coded user home and DISPLAY below — consider shipping
# this as a user unit with %h so it works for any user/display. Confirm
# intended deployment before changing.
ExecStart=/home/alice/.local/bin/nocb daemon
Restart=on-failure
RestartSec=5
Environment="DISPLAY=:0"

[Install]
WantedBy=default.target

=== shell.nix ===
{ pkgs ? import <nixpkgs> {} }:

let
  rust-overlay = import (builtins.fetchTarball {
    url = "https://github.com/oxalica/rust-overlay/archive/master.tar.gz";
  });
  pkgs-with-overlay = import <nixpkgs> { overlays = [ rust-overlay ]; };
  rust-nightly = pkgs-with-overlay.rust-bin.nightly.latest.default;
in
pkgs.mkShell {
  buildInputs = with pkgs; [
    # Build tools - nightly Rust
    rust-nightly
    pkg-config
    # X11/XCB dependencies
    xorg.libxcb
    xorg.libX11
    xorg.libXcursor
    xorg.libXrandr
    xorg.libXi
    # OpenGL/Vulkan support
    libGL
    vulkan-loader
    vulkan-headers
    vulkan-validation-layers
    # libxkbcommon
    libxkbcommon
    # Mesa
    mesa
    # Optional: for development
    rust-analyzer
    clippy
    rustfmt
  ];

  # Set up pkg-config paths
  PKG_CONFIG_PATH = "${pkgs.xorg.libxcb}/lib/pkgconfig";

  # Set LD_LIBRARY_PATH for runtime
  LD_LIBRARY_PATH = pkgs.lib.makeLibraryPath [
    pkgs.libGL
    pkgs.xorg.libX11
    pkgs.xorg.libXcursor
    pkgs.xorg.libXrandr
    pkgs.xorg.libXi
    pkgs.vulkan-loader
    pkgs.libxkbcommon
  ];
}