Initial commit

This commit is contained in:
2026-03-25 09:32:02 +01:00
commit 23ce1b7ee2
77 changed files with 21169 additions and 0 deletions

22
brittle-core/Cargo.toml Normal file
View File

@@ -0,0 +1,22 @@
[package]
name = "brittle-core"
version = "0.1.0"
edition = "2024"

[dependencies]
# Timestamps on model entities; "serde" feature so they serialize with the rest.
chrono = { version = "0.4", features = ["serde"] }
# Repository history; vendored libgit2 so no system library is required.
git2 = { version = "0.20", features = ["vendored-libgit2"] }
serde = { version = "1", features = ["derive"] }
# SHA-2 hashing (usage not visible in this chunk).
sha2 = "0.10"
thiserror = "2"
# On-disk entity format (models round-trip through TOML in tests).
toml = "0.8"
# HTTP client used by the brittle-seed binary to fetch example PDFs.
ureq = "2"
# Time-ordered UUIDv7 entity ids (see model::ids).
uuid = { version = "1", features = ["v7", "serde"] }

[[bin]]
name = "brittle-seed"
path = "src/bin/seed.rs"

[dev-dependencies]
serde_json = "1"
tempfile = "3"

View File

@@ -0,0 +1,179 @@
use crate::bibtex::validation::validate_for_export;
use crate::error::BibtexError;
use crate::model::{Person, Reference};
/// Escape a BibTeX field value by wrapping special characters in braces.
/// Handles `{`, `}`, `\`, and preserves existing braced groups.
/// Wrap a BibTeX field value in braces, escaping any unbalanced braces.
///
/// Wrapping the whole value in a braced group prevents BibTeX from
/// case-folding titles. Balanced `{...}` groups inside the value are kept
/// as-is (they are valid BibTeX grouping); stray `{` or `}` characters are
/// backslash-escaped so the output always has balanced braces. Backslashes
/// are passed through untouched because field values may legitimately
/// contain LaTeX commands (e.g. accents like `\'e`).
fn escape_field(value: &str) -> String {
    // First pass: find byte indices of braces that have no matching partner.
    let mut open_stack: Vec<usize> = Vec::new();
    let mut stray: Vec<usize> = Vec::new();
    for (idx, ch) in value.char_indices() {
        match ch {
            '{' => open_stack.push(idx),
            '}' => {
                // A '}' with no pending '{' is unmatched.
                if open_stack.pop().is_none() {
                    stray.push(idx);
                }
            }
            _ => {}
        }
    }
    // Any '{' still on the stack never got closed.
    stray.extend(open_stack);
    // Second pass: rebuild the value, escaping only the stray braces.
    let mut escaped = String::with_capacity(value.len() + 2);
    escaped.push('{');
    for (idx, ch) in value.char_indices() {
        if stray.contains(&idx) {
            escaped.push('\\');
        }
        escaped.push(ch);
    }
    escaped.push('}');
    escaped
}
/// Join persons into a single BibTeX author/editor string, separated by
/// the literal word `and` as the BibTeX format requires.
fn format_persons(persons: &[Person]) -> String {
    let mut joined = String::new();
    for (index, person) in persons.iter().enumerate() {
        if index > 0 {
            joined.push_str(" and ");
        }
        joined.push_str(&person.to_bibtex());
    }
    joined
}
/// Export a single reference as a BibTeX entry string.
///
/// Returns an error if required fields are missing.
pub fn export_reference(reference: &Reference) -> Result<String, BibtexError> {
    validate_for_export(reference)?;
    // Entry head: "@<type>{<cite_key>,\n".
    let mut entry = format!(
        "@{}{{{},\n",
        reference.entry_type.bibtex_name(),
        reference.cite_key
    );
    // Shared formatting for every "<name> = {<value>}," line.
    let mut push_field = |name: &str, raw: &str| {
        entry.push_str(&format!(" {name} = {},\n", escape_field(raw)));
    };
    // Authors and editors come first for readability.
    if !reference.authors.is_empty() {
        push_field("author", &format_persons(&reference.authors));
    }
    if !reference.editors.is_empty() {
        push_field("editor", &format_persons(&reference.editors));
    }
    // All other fields in sorted order (BTreeMap guarantees this).
    for (name, raw) in &reference.fields {
        push_field(name, raw);
    }
    entry.push('}');
    Ok(entry)
}
/// Export multiple references as a `.bib` file string.
///
/// Skips references with missing required fields and collects all errors.
/// Returns the BibTeX string and a list of any export errors.
pub fn export_references(references: &[Reference]) -> (String, Vec<BibtexError>) {
let mut out = String::new();
let mut errors = Vec::new();
for reference in references {
match export_reference(reference) {
Ok(entry) => {
if !out.is_empty() {
out.push('\n');
}
out.push_str(&entry);
out.push('\n');
}
Err(e) => errors.push(e),
}
}
(out, errors)
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::model::{EntryType, Person, Reference};

    /// Fixture: a complete @article (author, title, journal, year, volume).
    fn make_article() -> Reference {
        let mut r = Reference::new("turing1950", EntryType::Article);
        r.authors.push(Person {
            family: "Turing".into(),
            given: Some("Alan M.".into()),
            prefix: None,
            suffix: None,
        });
        r.fields.insert(
            "title".into(),
            "Computing Machinery and Intelligence".into(),
        );
        r.fields.insert("journal".into(), "Mind".into());
        r.fields.insert("year".into(), "1950".into());
        r.fields.insert("volume".into(), "59".into());
        r
    }

    // A well-formed article exports with head line plus braced fields.
    #[test]
    fn article_export() {
        let r = make_article();
        let bibtex = export_reference(&r).unwrap();
        assert!(bibtex.starts_with("@article{turing1950,"));
        assert!(bibtex.contains("author = {Turing, Alan M.}"));
        assert!(bibtex.contains("title = {Computing Machinery and Intelligence}"));
        assert!(bibtex.contains("journal = {Mind}"));
        assert!(bibtex.contains("year = {1950}"));
    }

    // Multiple authors are joined with the literal " and " separator.
    #[test]
    fn multi_author_formatting() {
        let mut r = Reference::new("ab2024", EntryType::Article);
        r.authors.push(Person {
            family: "Doe".into(),
            given: Some("Jane".into()),
            prefix: None,
            suffix: None,
        });
        r.authors.push(Person {
            family: "Smith".into(),
            given: Some("John".into()),
            prefix: None,
            suffix: None,
        });
        r.fields.insert("title".into(), "A Paper".into());
        r.fields.insert("journal".into(), "Nature".into());
        r.fields.insert("year".into(), "2024".into());
        let bibtex = export_reference(&r).unwrap();
        assert!(bibtex.contains("Doe, Jane and Smith, John"));
    }

    // Removing a required field ("journal" for @article) fails validation.
    #[test]
    fn missing_required_field_returns_error() {
        let mut r = make_article();
        r.fields.remove("journal");
        assert!(export_reference(&r).is_err());
    }

    // @book accepts editors in place of authors.
    #[test]
    fn book_with_editor() {
        let mut r = Reference::new("knuth1986", EntryType::Book);
        r.editors.push(Person::new("Knuth"));
        r.fields.insert("title".into(), "The TeXbook".into());
        r.fields.insert("publisher".into(), "Addison-Wesley".into());
        r.fields.insert("year".into(), "1986".into());
        let bibtex = export_reference(&r).unwrap();
        assert!(bibtex.starts_with("@book{"));
        assert!(bibtex.contains("editor = {Knuth}"));
    }

    // Field lines follow the BTreeMap's alphabetical key order.
    #[test]
    fn fields_appear_in_sorted_order() {
        let r = make_article();
        let bibtex = export_reference(&r).unwrap();
        let journal_pos = bibtex.find("journal").unwrap();
        let title_pos = bibtex.find("title").unwrap();
        let year_pos = bibtex.find("year").unwrap();
        // BTreeMap order: journal < title < volume < year (alphabetical)
        assert!(journal_pos < title_pos);
        assert!(title_pos < year_pos);
    }

    // Bad references are skipped; good ones still make it into the output.
    #[test]
    fn export_references_collects_errors() {
        let good = make_article();
        let bad = Reference::new("incomplete", EntryType::Article);
        // Missing author, title, journal, year
        let (bibtex, errors) = export_references(&[good, bad]);
        assert_eq!(errors.len(), 1);
        assert!(bibtex.contains("@article{turing1950,"));
    }
}

View File

@@ -0,0 +1,5 @@
//! BibTeX support: exporting references to `.bib` entries (`export`) and
//! checking that required fields are present before export (`validation`).
pub mod export;
pub mod validation;

// Re-exports so callers can use a flat `bibtex::` API surface.
pub use export::{export_reference, export_references};
pub use validation::validate_for_export;

View File

@@ -0,0 +1,104 @@
use crate::error::BibtexError;
use crate::model::{EntryType, Reference};
/// Returns the required fields for a given BibTeX entry type.
///
/// Field names follow classic BibTeX conventions. Note that "author" and
/// "editor" are satisfied by the reference's structured person lists rather
/// than its free-form `fields` map (see `validate_for_export`).
fn required_fields(entry_type: &EntryType) -> &'static [&'static str] {
    match entry_type {
        EntryType::Article => &["author", "title", "journal", "year"],
        EntryType::Book => &["title", "publisher", "year"],
        EntryType::Booklet => &["title"],
        EntryType::InBook => &["title", "publisher", "year", "chapter"],
        EntryType::InCollection => &["author", "title", "booktitle", "publisher", "year"],
        EntryType::InProceedings => &["author", "title", "booktitle", "year"],
        EntryType::Manual => &["title"],
        EntryType::MastersThesis => &["author", "title", "school", "year"],
        // @misc accepts anything; no field is mandatory.
        EntryType::Misc => &[],
        EntryType::PhdThesis => &["author", "title", "school", "year"],
        EntryType::Proceedings => &["title", "year"],
        EntryType::TechReport => &["author", "title", "institution", "year"],
        EntryType::Unpublished => &["author", "title", "note"],
        EntryType::Online => &["title", "url"],
    }
}
/// Validate that a reference has all required fields for BibTeX export.
/// Returns an error describing the first missing required field found.
pub fn validate_for_export(reference: &Reference) -> Result<(), BibtexError> {
    // "author" and "editor" live in the structured person lists,
    // not in the free-form `fields` map.
    let is_present = |field: &str| match field {
        "author" => !reference.authors.is_empty(),
        "editor" => !reference.editors.is_empty(),
        other => reference.fields.contains_key(other),
    };
    // Report the first missing field in declaration order.
    if let Some(&field) = required_fields(&reference.entry_type)
        .iter()
        .find(|&&field| !is_present(field))
    {
        return Err(BibtexError::MissingRequiredField {
            cite_key: reference.cite_key.clone(),
            entry_type: reference.entry_type.bibtex_name().to_owned(),
            field: field.to_owned(),
        });
    }
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::model::{EntryType, Person, Reference};

    /// Fixture: an @article carrying every field it requires.
    fn make_article() -> Reference {
        let mut r = Reference::new("doe2024", EntryType::Article);
        r.authors.push(Person::new("Doe"));
        r.fields.insert("title".into(), "A Paper".into());
        r.fields.insert("journal".into(), "Nature".into());
        r.fields.insert("year".into(), "2024".into());
        r
    }

    #[test]
    fn valid_article_passes() {
        let r = make_article();
        assert!(validate_for_export(&r).is_ok());
    }

    // "author" is checked against the structured authors list.
    #[test]
    fn article_missing_author_fails() {
        let mut r = make_article();
        r.authors.clear();
        let err = validate_for_export(&r).unwrap_err();
        assert!(
            matches!(err, BibtexError::MissingRequiredField { field, .. } if field == "author")
        );
    }

    // Ordinary required fields are checked against the `fields` map.
    #[test]
    fn article_missing_journal_fails() {
        let mut r = make_article();
        r.fields.remove("journal");
        let err = validate_for_export(&r).unwrap_err();
        assert!(
            matches!(err, BibtexError::MissingRequiredField { field, .. } if field == "journal")
        );
    }

    // @misc has an empty requirement list, so a bare reference validates.
    #[test]
    fn misc_has_no_required_fields() {
        let r = Reference::new("anon", EntryType::Misc);
        assert!(validate_for_export(&r).is_ok());
    }

    #[test]
    fn phd_thesis_requires_school() {
        let mut r = Reference::new("smith2020", EntryType::PhdThesis);
        r.authors.push(Person::new("Smith"));
        r.fields.insert("title".into(), "A Thesis".into());
        r.fields.insert("year".into(), "2020".into());
        let err = validate_for_export(&r).unwrap_err();
        assert!(
            matches!(err, BibtexError::MissingRequiredField { field, .. } if field == "school")
        );
    }
}

View File

@@ -0,0 +1,286 @@
//! Creates an example Brittle repository with realistic academic references.
//!
//! For references that have freely available PDFs (arXiv preprints and open
//! author copies), the script downloads the PDF and attaches it to the
//! reference. Downloads that fail are skipped with a warning so the seed
//! always completes even without network access.
//!
//! Usage:
//! brittle-seed [PATH]
//!
//! PATH defaults to `~/brittle-example`. The directory must not already
//! contain a git repository.
use std::io::Read;
use std::path::PathBuf;
use brittle_core::{Brittle, EntryType, FsStore, Person, ReferenceId};
fn main() {
    // Target directory: first CLI argument, or ~/brittle-example by default.
    let path = match std::env::args().nth(1) {
        Some(p) => PathBuf::from(p),
        None => {
            let home = std::env::var("HOME").expect("HOME not set");
            PathBuf::from(home).join("brittle-example")
        }
    };
    // Refuse to seed into a directory that already has a git repository.
    if path.join(".git").exists() {
        eprintln!("error: {} already contains a git repository", path.display());
        std::process::exit(1);
    }
    std::fs::create_dir_all(&path).expect("could not create directory");
    println!("Creating repository at {}", path.display());
    let mut b = Brittle::create(&path).expect("create repository");

    // ── Libraries ─────────────────────────────────────────────────────────────
    // Two roots (Computer Science, Mathematics); ML / Systems / PL nest under CS.
    let cs = b.create_library("Computer Science", None).unwrap();
    let ml = b.create_library("Machine Learning", Some(cs.id)).unwrap();
    let sys = b.create_library("Systems", Some(cs.id)).unwrap();
    let math = b.create_library("Mathematics", None).unwrap();
    let pl = b.create_library("Programming Languages", Some(cs.id)).unwrap();

    // ── References ────────────────────────────────────────────────────────────
    // Pattern for each reference: create, fill authors/fields, persist with
    // update_reference, add to its library, then optionally attach a PDF.

    // -- Machine Learning --
    let mut r = b.create_reference("lecun1998gradient", EntryType::Article).unwrap();
    r.authors = vec![
        person("LeCun", "Yann"),
        person("Bottou", "Léon"),
        person("Bengio", "Yoshua"),
        person("Haffner", "Patrick"),
    ];
    r.fields.insert("title".into(), "Gradient-based learning applied to document recognition".into());
    r.fields.insert("journal".into(), "Proceedings of the IEEE".into());
    r.fields.insert("volume".into(), "86".into());
    r.fields.insert("number".into(), "11".into());
    r.fields.insert("pages".into(), "2278--2324".into());
    r.fields.insert("year".into(), "1998".into());
    let id = r.id;
    b.update_reference(r).unwrap();
    b.add_to_library(ml.id, id).unwrap();
    attach_pdf(&mut b, id, "http://yann.lecun.com/exdb/publis/pdf/lecun-01a.pdf");

    let mut r = b.create_reference("vaswani2017attention", EntryType::InProceedings).unwrap();
    r.authors = vec![
        person("Vaswani", "Ashish"),
        person("Shazeer", "Noam"),
        person("Parmar", "Niki"),
        person("Uszkoreit", "Jakob"),
        person("Jones", "Llion"),
        person("Gomez", "Aidan N."),
        person("Kaiser", "Łukasz"),
        person("Polosukhin", "Illia"),
    ];
    r.fields.insert("title".into(), "Attention Is All You Need".into());
    r.fields.insert("booktitle".into(), "Advances in Neural Information Processing Systems".into());
    r.fields.insert("volume".into(), "30".into());
    r.fields.insert("year".into(), "2017".into());
    let id = r.id;
    b.update_reference(r).unwrap();
    b.add_to_library(ml.id, id).unwrap();
    attach_pdf(&mut b, id, "https://arxiv.org/pdf/1706.03762");

    let mut r = b.create_reference("goodfellow2016deep", EntryType::Book).unwrap();
    r.authors = vec![
        person("Goodfellow", "Ian"),
        person("Bengio", "Yoshua"),
        person("Courville", "Aaron"),
    ];
    r.fields.insert("title".into(), "Deep Learning".into());
    r.fields.insert("publisher".into(), "MIT Press".into());
    r.fields.insert("year".into(), "2016".into());
    r.fields.insert("url".into(), "http://www.deeplearningbook.org".into());
    let id = r.id;
    b.update_reference(r).unwrap();
    b.add_to_library(ml.id, id).unwrap();
    // No freely available PDF for this book.

    let mut r = b.create_reference("ho2020denoising", EntryType::InProceedings).unwrap();
    r.authors = vec![
        person("Ho", "Jonathan"),
        person("Jain", "Ajay"),
        person("Abbeel", "Pieter"),
    ];
    r.fields.insert("title".into(), "Denoising Diffusion Probabilistic Models".into());
    r.fields.insert("booktitle".into(), "Advances in Neural Information Processing Systems".into());
    r.fields.insert("volume".into(), "33".into());
    r.fields.insert("pages".into(), "6840--6851".into());
    r.fields.insert("year".into(), "2020".into());
    let id = r.id;
    b.update_reference(r).unwrap();
    b.add_to_library(ml.id, id).unwrap();
    attach_pdf(&mut b, id, "https://arxiv.org/pdf/2006.11239");

    // -- Systems --
    let mut r = b.create_reference("lamport1978time", EntryType::Article).unwrap();
    r.authors = vec![person("Lamport", "Leslie")];
    r.fields.insert("title".into(), "Time, Clocks, and the Ordering of Events in a Distributed System".into());
    r.fields.insert("journal".into(), "Communications of the ACM".into());
    r.fields.insert("volume".into(), "21".into());
    r.fields.insert("number".into(), "7".into());
    r.fields.insert("pages".into(), "558--565".into());
    r.fields.insert("year".into(), "1978".into());
    let id = r.id;
    b.update_reference(r).unwrap();
    b.add_to_library(sys.id, id).unwrap();
    attach_pdf(&mut b, id, "https://lamport.azurewebsites.net/pubs/time-clocks.pdf");

    let mut r = b.create_reference("rosenblum1992lfs", EntryType::Article).unwrap();
    r.authors = vec![
        person("Rosenblum", "Mendel"),
        person("Ousterhout", "John K."),
    ];
    r.fields.insert("title".into(), "The Design and Implementation of a Log-Structured File System".into());
    r.fields.insert("journal".into(), "ACM Transactions on Computer Systems".into());
    r.fields.insert("volume".into(), "10".into());
    r.fields.insert("number".into(), "1".into());
    r.fields.insert("pages".into(), "26--52".into());
    r.fields.insert("year".into(), "1992".into());
    let id = r.id;
    b.update_reference(r).unwrap();
    b.add_to_library(sys.id, id).unwrap();
    // Paywalled; no freely available PDF.

    let mut r = b.create_reference("dean2004mapreduce", EntryType::InProceedings).unwrap();
    r.authors = vec![
        person("Dean", "Jeffrey"),
        person("Ghemawat", "Sanjay"),
    ];
    r.fields.insert("title".into(), "MapReduce: Simplified Data Processing on Large Clusters".into());
    r.fields.insert("booktitle".into(), "OSDI".into());
    r.fields.insert("pages".into(), "137--150".into());
    r.fields.insert("year".into(), "2004".into());
    let id = r.id;
    b.update_reference(r).unwrap();
    b.add_to_library(sys.id, id).unwrap();
    attach_pdf(&mut b, id, "https://static.googleusercontent.com/media/research.google.com/en//archive/mapreduce-osdi04.pdf");

    // -- Programming Languages --
    let mut r = b.create_reference("milner1978polymorphism", EntryType::Article).unwrap();
    r.authors = vec![person("Milner", "Robin")];
    r.fields.insert("title".into(), "A Theory of Type Polymorphism in Programming".into());
    r.fields.insert("journal".into(), "Journal of Computer and System Sciences".into());
    r.fields.insert("volume".into(), "17".into());
    r.fields.insert("number".into(), "3".into());
    r.fields.insert("pages".into(), "348--375".into());
    r.fields.insert("year".into(), "1978".into());
    let id = r.id;
    b.update_reference(r).unwrap();
    b.add_to_library(pl.id, id).unwrap();
    // Paywalled; no freely available PDF.

    let mut r = b.create_reference("matsakis2014rust", EntryType::InProceedings).unwrap();
    r.authors = vec![
        person("Matsakis", "Nicholas D."),
        person("Klock", "Felix S."),
    ];
    r.fields.insert("title".into(), "The Rust Language".into());
    r.fields.insert("booktitle".into(), "ACM SIGAda Annual Conference on High Integrity Language Technology".into());
    r.fields.insert("pages".into(), "103--104".into());
    r.fields.insert("year".into(), "2014".into());
    let id = r.id;
    b.update_reference(r).unwrap();
    b.add_to_library(pl.id, id).unwrap();
    // Paywalled; no freely available PDF.

    // -- Mathematics --
    let mut r = b.create_reference("turing1936computable", EntryType::Article).unwrap();
    r.authors = vec![person("Turing", "Alan M.")];
    r.fields.insert("title".into(), "On Computable Numbers, with an Application to the Entscheidungsproblem".into());
    r.fields.insert("journal".into(), "Proceedings of the London Mathematical Society".into());
    r.fields.insert("volume".into(), "42".into());
    r.fields.insert("number".into(), "1".into());
    r.fields.insert("pages".into(), "230--265".into());
    r.fields.insert("year".into(), "1936".into());
    let id = r.id;
    b.update_reference(r).unwrap();
    b.add_to_library(math.id, id).unwrap();
    // No freely available PDF.

    let mut r = b.create_reference("knuth1984texbook", EntryType::Book).unwrap();
    r.authors = vec![person("Knuth", "Donald E.")];
    r.fields.insert("title".into(), "The TeXbook".into());
    r.fields.insert("publisher".into(), "Addison-Wesley".into());
    r.fields.insert("year".into(), "1984".into());
    r.fields.insert("series".into(), "Computers and Typesetting".into());
    r.fields.insert("volume".into(), "A".into());
    let id = r.id;
    b.update_reference(r).unwrap();
    b.add_to_library(math.id, id).unwrap();
    // Copyrighted book; no freely available PDF.

    // A reference in both ML and Mathematics (cross-library membership).
    let mut r = b.create_reference("cybenko1989approximation", EntryType::Article).unwrap();
    r.authors = vec![person("Cybenko", "George")];
    r.fields.insert("title".into(), "Approximation by Superpositions of a Sigmoidal Function".into());
    r.fields.insert("journal".into(), "Mathematics of Control, Signals, and Systems".into());
    r.fields.insert("volume".into(), "2".into());
    r.fields.insert("number".into(), "4".into());
    r.fields.insert("pages".into(), "303--314".into());
    r.fields.insert("year".into(), "1989".into());
    let id = r.id;
    b.update_reference(r).unwrap();
    b.add_to_library(ml.id, id).unwrap();
    b.add_to_library(math.id, id).unwrap();
    // Paywalled; no freely available PDF.

    // Summary (counts above: 12 references across 5 libraries).
    println!();
    println!("Done.");
    println!();
    println!(" Libraries : Computer Science (Machine Learning, Systems, Programming Languages), Mathematics");
    println!(" References: 12 across all libraries");
    println!();
    println!("Open the repository in Brittle with: :open {}", path.display());
}
// ── PDF download ──────────────────────────────────────────────────────────────
/// Download the PDF at `url` and attach it to `id`.
///
/// Prints one progress line per attempt; any failure (download, temp-file
/// write, or attach) is reported as "skipped (…)" rather than aborting, so
/// the seed always completes even without network access.
fn attach_pdf(b: &mut Brittle<FsStore>, id: ReferenceId, url: &str) {
    // Short progress label: the last path segment of the URL.
    let label = url.rsplit('/').next().unwrap_or(url);
    // Separator after the label so the result does not run straight into it
    // (previously this printed e.g. "lecun-01a.pdf123 KB").
    print!("{label}: ");
    // Flush so the label is visible while the download is in flight.
    std::io::Write::flush(&mut std::io::stdout()).ok();
    match download(url) {
        Err(e) => println!("skipped ({e})"),
        Ok(bytes) => {
            // Stage the bytes in a temp file because attach_pdf takes a path.
            let tmp = std::env::temp_dir().join(format!("{id}.pdf"));
            if let Err(e) = std::fs::write(&tmp, &bytes) {
                println!("skipped (write: {e})");
                return;
            }
            match b.attach_pdf(id, &tmp) {
                Ok(_) => println!("{} KB", bytes.len() / 1024),
                Err(e) => println!("skipped (attach: {e})"),
            }
            // Best-effort cleanup of the staging file.
            let _ = std::fs::remove_file(&tmp);
        }
    }
}
/// Fetch `url` and return the response body bytes.
/// Propagates both HTTP errors (from `call`) and read errors.
fn download(url: &str) -> Result<Vec<u8>, Box<dyn std::error::Error>> {
    let mut bytes = Vec::new();
    ureq::get(url).call()?.into_reader().read_to_end(&mut bytes)?;
    Ok(bytes)
}
// ── Helpers ───────────────────────────────────────────────────────────────────
/// Shorthand constructor: a person with family and given names only.
fn person(family: &str, given: &str) -> Person {
    Person {
        family: family.to_owned(),
        given: Some(given.to_owned()),
        prefix: None,
        suffix: None,
    }
}

104
brittle-core/src/error.rs Normal file
View File

@@ -0,0 +1,104 @@
use std::path::PathBuf;
use thiserror::Error;
/// Top-level error returned from all public Brittle API methods.
/// Each variant wraps one layer's error type and forwards its message.
#[derive(Debug, Error)]
pub enum BrittleError {
    /// Storage-layer failure (I/O, git, (de)serialization, lookups).
    #[error("{0}")]
    Store(#[from] StoreError),
    /// Business-rule violation (see `ValidationError`).
    #[error("{0}")]
    Validation(#[from] ValidationError),
    /// BibTeX export failure.
    #[error("{0}")]
    BibTeX(#[from] BibtexError),
}

/// Errors from the storage layer.
#[derive(Debug, Error)]
pub enum StoreError {
    /// Lookup by id failed; `entity_type` says what kind of entity.
    #[error("{entity_type} not found: {id}")]
    NotFound { entity_type: EntityType, id: String },
    #[error("I/O error: {0}")]
    Io(#[from] std::io::Error),
    /// Failed to serialize an entity for writing.
    #[error("serialization error: {message}")]
    Serialization { message: String },
    /// Failed to parse an on-disk entity file; `path` points at the file.
    #[error("deserialization error for {path}: {message}")]
    Deserialization { path: PathBuf, message: String },
    #[error("git error: {0}")]
    Git(#[from] git2::Error),
    #[error("repository not found at {path}")]
    RepoNotFound { path: PathBuf },
    #[error("repository already exists at {path}")]
    RepoAlreadyExists { path: PathBuf },
}
/// The kind of entity involved in a not-found error.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum EntityType {
    Reference,
    Library,
    Annotation,
    Snapshot,
}

impl std::fmt::Display for EntityType {
    /// Human-readable name used in error messages (matches variant name).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let name = match self {
            EntityType::Reference => "Reference",
            EntityType::Library => "Library",
            EntityType::Annotation => "Annotation",
            EntityType::Snapshot => "Snapshot",
        };
        f.write_str(name)
    }
}
/// Business logic validation errors.
#[derive(Debug, Error)]
pub enum ValidationError {
    /// Re-parenting a library under one of its own descendants.
    #[error(
        "library cycle detected: moving library {library_id} under {parent_id} would create a cycle"
    )]
    LibraryCycle {
        library_id: String,
        parent_id: String,
    },
    /// Deleting a library that still has child libraries.
    #[error("library {id} has children and cannot be deleted; delete or move children first")]
    LibraryHasChildren { id: String },
    /// Cite keys must be unique across the repository.
    #[error("cite key already exists: {cite_key}")]
    DuplicateCiteKey { cite_key: String },
    #[error("cite key cannot be empty")]
    EmptyCiteKey,
    #[error("library name cannot be empty")]
    EmptyLibraryName,
    /// An operation needed an attached PDF but the reference has none.
    #[error("reference {reference_id} has no PDF attached")]
    NoPdfAttached { reference_id: String },
    /// The path given for a PDF attachment does not exist.
    #[error("PDF file not found: {path}")]
    PdfNotFound { path: PathBuf },
    /// The working tree is dirty and the operation needs a clean state.
    #[error("there are uncommitted changes; create a snapshot or call discard_changes() first")]
    UncommittedChanges,
}

/// Errors specific to BibTeX export.
#[derive(Debug, Error)]
pub enum BibtexError {
    /// A required field for the entry type is absent (see
    /// `bibtex::validation::validate_for_export`).
    #[error("reference '{cite_key}' ({entry_type}): missing required field '{field}'")]
    MissingRequiredField {
        cite_key: String,
        entry_type: String,
        field: String,
    },
}

1051
brittle-core/src/lib.rs Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,229 @@
use crate::model::ids::{AnnotationId, ReferenceId};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
/// A point in PDF coordinate space.
/// Origin is bottom-left; units are points (1/72 inch), matching ISO 32000.
#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)]
pub struct Point {
    pub x: f64,
    pub y: f64,
}

/// A rectangle in PDF coordinate space.
/// `(x, y)` is one corner with `width`/`height` extents — presumably the
/// bottom-left corner given the Point convention above; TODO confirm.
#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)]
pub struct Rect {
    pub x: f64,
    pub y: f64,
    pub width: f64,
    pub height: f64,
}

/// A quadrilateral for text markup annotations (highlight, underline, etc.).
/// Four points define one region, typically one line of text.
/// Matches the PDF spec QuadPoints representation (4 vertices per quad).
#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)]
pub struct Quad {
    pub points: [Point; 4],
}
/// RGBA color. Channels are 0–255; `a` is the alpha (opacity) channel.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub struct Color {
    pub r: u8,
    pub g: u8,
    pub b: u8,
    pub a: u8,
}

impl Color {
    /// Half-transparent yellow (alpha 128).
    pub const YELLOW: Color = Color {
        r: 255,
        g: 255,
        b: 0,
        a: 128,
    };
    /// Half-transparent red (alpha 128).
    pub const RED: Color = Color {
        r: 255,
        g: 0,
        b: 0,
        a: 128,
    };
    /// Half-transparent green (alpha 128).
    pub const GREEN: Color = Color {
        r: 0,
        g: 255,
        b: 0,
        a: 128,
    };
}
/// The four text markup annotation types defined in ISO 32000.
/// All share the same QuadPoints-based geometry.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum TextMarkupType {
    Highlight,
    Underline,
    Squiggly,
    StrikeOut,
}

/// The kind of annotation and its type-specific geometry/data.
/// Serialized as an internally-tagged enum (`type = "textmarkup"`, etc.).
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum AnnotationType {
    /// Text markup (highlight, underline, squiggly, strikeout).
    /// Uses QuadPoints per PDF spec for precise multi-line region selection.
    TextMarkup {
        markup_type: TextMarkupType,
        quads: Vec<Quad>,
        color: Color,
        /// The selected text, stored for search and export without re-reading the PDF.
        selected_text: Option<String>,
    },
    /// Sticky note (popup comment).
    Note { position: Point },
    /// Inline text box.
    FreeText { rect: Rect },
    /// Freehand ink drawing (e.g., circling a diagram).
    Ink {
        /// Multiple strokes, each a sequence of connected points.
        paths: Vec<Vec<Point>>,
        color: Color,
        /// Stroke width in points.
        width: f64,
    },
    /// Area/image selection for extracting figures from PDFs.
    Area { rect: Rect },
}
/// A single annotation on a PDF page.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct Annotation {
    pub id: AnnotationId,
    // The reference (and thus the PDF) this annotation belongs to.
    pub reference_id: ReferenceId,
    /// 0-indexed physical page number.
    pub page: u32,
    /// Display page label (e.g., "iv", "23") — may differ from the physical page index.
    pub page_label: Option<String>,
    // Kind-specific geometry and data (see `AnnotationType`).
    pub annotation_type: AnnotationType,
    /// Free-form text: note body, comment on a highlight, etc.
    pub content: Option<String>,
    pub created_at: DateTime<Utc>,
    pub modified_at: DateTime<Utc>,
}

impl Annotation {
    /// Create a new annotation with a fresh id; `created_at` and
    /// `modified_at` both start at the current time. `page_label` and
    /// `content` start unset.
    pub fn new(reference_id: ReferenceId, page: u32, annotation_type: AnnotationType) -> Self {
        let now = Utc::now();
        Self {
            id: AnnotationId::new(),
            reference_id,
            page,
            page_label: None,
            annotation_type,
            content: None,
            created_at: now,
            modified_at: now,
        }
    }
}

/// All annotations for a single reference, stored as one file.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct AnnotationSet {
    pub reference_id: ReferenceId,
    pub annotations: Vec<Annotation>,
}

impl AnnotationSet {
    /// An empty annotation set for the given reference.
    pub fn new(reference_id: ReferenceId) -> Self {
        Self {
            reference_id,
            annotations: Vec::new(),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Fixture: a one-quad yellow highlight with selected text.
    fn make_highlight() -> AnnotationType {
        AnnotationType::TextMarkup {
            markup_type: TextMarkupType::Highlight,
            quads: vec![Quad {
                points: [
                    Point { x: 10.0, y: 20.0 },
                    Point { x: 100.0, y: 20.0 },
                    Point { x: 10.0, y: 30.0 },
                    Point { x: 100.0, y: 30.0 },
                ],
            }],
            color: Color::YELLOW,
            selected_text: Some("important text".into()),
        }
    }

    // TOML round-trip for the tagged TextMarkup variant.
    #[test]
    fn annotation_serde_round_trip_highlight() {
        let ref_id = ReferenceId::new();
        let set = AnnotationSet {
            reference_id: ref_id,
            annotations: vec![Annotation::new(ref_id, 3, make_highlight())],
        };
        let toml_str = toml::to_string(&set).expect("serialize");
        let set2: AnnotationSet = toml::from_str(&toml_str).expect("deserialize");
        assert_eq!(set.reference_id, set2.reference_id);
        assert_eq!(set.annotations.len(), set2.annotations.len());
        assert_eq!(set.annotations[0].page, set2.annotations[0].page);
    }

    // TOML round-trip for the Ink variant (nested Vec<Vec<Point>> paths).
    #[test]
    fn annotation_serde_round_trip_ink() {
        let ref_id = ReferenceId::new();
        let ink = AnnotationType::Ink {
            paths: vec![vec![Point { x: 0.0, y: 0.0 }, Point { x: 10.0, y: 10.0 }]],
            color: Color::RED,
            width: 2.0,
        };
        let set = AnnotationSet {
            reference_id: ref_id,
            annotations: vec![Annotation::new(ref_id, 0, ink)],
        };
        let toml_str = toml::to_string(&set).expect("serialize");
        let set2: AnnotationSet = toml::from_str(&toml_str).expect("deserialize");
        assert_eq!(set, set2);
    }

    // Every TextMarkupType variant survives a serialize/deserialize pass.
    #[test]
    fn all_markup_types_serialize() {
        let ref_id = ReferenceId::new();
        for markup_type in [
            TextMarkupType::Highlight,
            TextMarkupType::Underline,
            TextMarkupType::Squiggly,
            TextMarkupType::StrikeOut,
        ] {
            let ann = Annotation::new(
                ref_id,
                0,
                AnnotationType::TextMarkup {
                    markup_type,
                    quads: vec![],
                    color: Color::GREEN,
                    selected_text: None,
                },
            );
            let set = AnnotationSet {
                reference_id: ref_id,
                annotations: vec![ann],
            };
            let toml_str = toml::to_string(&set).expect("serialize");
            let _: AnnotationSet = toml::from_str(&toml_str).expect("deserialize");
        }
    }
}

View File

@@ -0,0 +1,67 @@
use serde::{Deserialize, Serialize};
use std::fmt;
use uuid::Uuid;
/// Defines a strongly-typed UUID newtype with the trait impls shared by all
/// entity id types, so different id kinds cannot be mixed up at compile time.
macro_rules! define_id {
    ($name:ident) => {
        #[derive(
            Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize,
        )]
        pub struct $name(pub Uuid);

        impl $name {
            /// Generate a fresh id. UUIDv7 is time-ordered, so ids sort
            /// roughly by creation time (useful with `Ord`/`BTreeSet`).
            pub fn new() -> Self {
                Self(Uuid::now_v7())
            }
        }

        impl Default for $name {
            fn default() -> Self {
                Self::new()
            }
        }

        // Allow wrapping an existing UUID (e.g. one parsed from disk).
        impl From<Uuid> for $name {
            fn from(uuid: Uuid) -> Self {
                Self(uuid)
            }
        }

        // Displays as the standard hyphenated UUID string.
        impl fmt::Display for $name {
            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                write!(f, "{}", self.0)
            }
        }
    };
}

define_id!(ReferenceId);
define_id!(LibraryId);
define_id!(AnnotationId);
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn new_generates_unique_ids() {
        let a = ReferenceId::new();
        let b = ReferenceId::new();
        assert_ne!(a, b);
    }

    // Display goes through the inner Uuid's hyphenated formatting.
    #[test]
    fn display_is_uuid_format() {
        let id = ReferenceId::new();
        let s = id.to_string();
        assert_eq!(s.len(), 36); // UUID hyphenated format
    }

    // The derived Serialize/Deserialize round-trip (via JSON here).
    #[test]
    fn serde_round_trip() {
        let id = LibraryId::new();
        let json = serde_json::to_string(&id).unwrap();
        let id2: LibraryId = serde_json::from_str(&json).unwrap();
        assert_eq!(id, id2);
    }
}

View File

@@ -0,0 +1,66 @@
use crate::model::ids::{LibraryId, ReferenceId};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use std::collections::BTreeSet;
/// A named collection of references. Forms a tree via `parent_id`.
///
/// References are not "owned" by a library — they exist in a flat pool.
/// A reference can appear in multiple libraries (multi-membership).
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct Library {
    pub id: LibraryId,
    pub name: String,
    /// `None` means this is a root library (no parent).
    pub parent_id: Option<LibraryId>,
    /// The set of references that are members of this library.
    /// BTreeSet for deterministic serialization order.
    pub members: BTreeSet<ReferenceId>,
    pub created_at: DateTime<Utc>,
    pub modified_at: DateTime<Utc>,
}

impl Library {
    /// Create an empty library with a fresh id; `created_at` and
    /// `modified_at` both start at the current time.
    pub fn new(name: impl Into<String>, parent_id: Option<LibraryId>) -> Self {
        let now = Utc::now();
        Self {
            id: LibraryId::new(),
            name: name.into(),
            parent_id,
            members: BTreeSet::new(),
            created_at: now,
            modified_at: now,
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // A library with one member survives a TOML round-trip intact.
    #[test]
    fn library_serde_round_trip() {
        let mut lib = Library::new("Machine Learning", None);
        let ref_id = ReferenceId::new();
        lib.members.insert(ref_id);
        let toml_str = toml::to_string(&lib).expect("serialize to TOML");
        let lib2: Library = toml::from_str(&toml_str).expect("deserialize from TOML");
        assert_eq!(lib.id, lib2.id);
        assert_eq!(lib.name, lib2.name);
        assert_eq!(lib.members, lib2.members);
        assert_eq!(lib.parent_id, lib2.parent_id);
    }

    // parent_id (Option<LibraryId>) also round-trips.
    #[test]
    fn nested_library_serde_round_trip() {
        let parent = Library::new("Science", None);
        let child = Library::new("Physics", Some(parent.id));
        let toml_str = toml::to_string(&child).expect("serialize to TOML");
        let child2: Library = toml::from_str(&toml_str).expect("deserialize from TOML");
        assert_eq!(child2.parent_id, Some(parent.id));
    }
}

View File

@@ -0,0 +1,13 @@
//! Domain model: references, libraries, annotations, snapshots, and their
//! strongly-typed ids. Submodule types are re-exported flat for convenience.
pub mod annotation;
pub mod ids;
pub mod library;
pub mod reference;
pub mod snapshot;

pub use annotation::{
    Annotation, AnnotationSet, AnnotationType, Color, Point, Quad, Rect, TextMarkupType,
};
pub use ids::{AnnotationId, LibraryId, ReferenceId};
pub use library::Library;
pub use reference::{EntryType, PdfAttachment, Person, Reference};
pub use snapshot::Snapshot;

View File

@@ -0,0 +1,241 @@
use crate::model::ids::ReferenceId;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use std::collections::BTreeMap;
use std::fmt;
use std::path::PathBuf;
/// A person (author, editor, translator, etc.).
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct Person {
    /// Family (last) name — the only mandatory part of a name.
    pub family: String,
    /// Given (first/middle) names, e.g. "Edsger W.".
    pub given: Option<String>,
    /// Name prefix: "von", "de", "van der", etc.
    pub prefix: Option<String>,
    /// Name suffix: "Jr.", "III", etc.
    pub suffix: Option<String>,
}
impl Person {
pub fn new(family: impl Into<String>) -> Self {
Self {
family: family.into(),
given: None,
prefix: None,
suffix: None,
}
}
/// Format for display: "Given prefix Family, Suffix" — natural reading order.
pub fn display_name(&self) -> String {
let mut parts = Vec::new();
if let Some(given) = &self.given {
parts.push(given.as_str());
}
if let Some(prefix) = &self.prefix {
parts.push(prefix.as_str());
}
parts.push(self.family.as_str());
let mut name = parts.join(" ");
if let Some(suffix) = &self.suffix {
name.push_str(", ");
name.push_str(suffix);
}
name
}
/// Format as BibTeX expects: "{prefix} {family}, {suffix}, {given}".
/// Falls back gracefully when optional parts are absent.
pub fn to_bibtex(&self) -> String {
let mut family_part = String::new();
if let Some(prefix) = &self.prefix {
family_part.push_str(prefix);
family_part.push(' ');
}
family_part.push_str(&self.family);
match (&self.suffix, &self.given) {
(Some(suffix), Some(given)) => {
format!("{family_part}, {suffix}, {given}")
}
(Some(suffix), None) => format!("{family_part}, {suffix}"),
(None, Some(given)) => format!("{family_part}, {given}"),
(None, None) => family_part,
}
}
}
impl fmt::Display for Person {
    /// Delegates to [`Person::display_name`] for user-facing text.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(&self.display_name())
    }
}
/// Standard BibTeX and common BibLaTeX entry types.
///
/// Serialized in lowercase (e.g. `MastersThesis` -> `"mastersthesis"`),
/// which matches the names returned by [`EntryType::bibtex_name`].
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum EntryType {
    Article,
    Book,
    Booklet,
    InBook,
    InCollection,
    InProceedings,
    Manual,
    MastersThesis,
    Misc,
    PhdThesis,
    Proceedings,
    TechReport,
    Unpublished,
    /// BibLaTeX-only type for web resources.
    Online,
}
impl EntryType {
/// The BibTeX entry type name as it appears in `.bib` files.
pub fn bibtex_name(&self) -> &'static str {
match self {
EntryType::Article => "article",
EntryType::Book => "book",
EntryType::Booklet => "booklet",
EntryType::InBook => "inbook",
EntryType::InCollection => "incollection",
EntryType::InProceedings => "inproceedings",
EntryType::Manual => "manual",
EntryType::MastersThesis => "mastersthesis",
EntryType::Misc => "misc",
EntryType::PhdThesis => "phdthesis",
EntryType::Proceedings => "proceedings",
EntryType::TechReport => "techreport",
EntryType::Unpublished => "unpublished",
EntryType::Online => "online",
}
}
}
/// A PDF file stored inside the Brittle repository.
///
/// This is metadata only; the file bytes live on disk at `stored_path`.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct PdfAttachment {
    /// Path relative to the repository root (e.g., `"pdfs/550e8400-....pdf"`).
    pub stored_path: PathBuf,
    /// SHA-256 hex digest of the file contents for integrity verification.
    pub content_hash: String,
}
/// A citable work. The core entity of Brittle.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct Reference {
    /// Internal identifier, assigned at creation.
    pub id: ReferenceId,
    /// The BibTeX cite key (e.g., `"knuth1984texbook"`). User-facing and mutable.
    pub cite_key: String,
    /// BibTeX entry type (`@article`, `@book`, ...).
    pub entry_type: EntryType,
    /// Authors listed in order.
    pub authors: Vec<Person>,
    /// Editors (for edited books, proceedings, etc.).
    pub editors: Vec<Person>,
    /// All other fields (title, year, journal, volume, etc.) as plain strings.
    /// BTreeMap for deterministic serialization order (important for git diffs).
    pub fields: BTreeMap<String, String>,
    /// Attached PDF, if any.
    pub pdf: Option<PdfAttachment>,
    pub created_at: DateTime<Utc>,
    pub modified_at: DateTime<Utc>,
}
impl Reference {
pub fn new(cite_key: impl Into<String>, entry_type: EntryType) -> Self {
let now = Utc::now();
Self {
id: ReferenceId::new(),
cite_key: cite_key.into(),
entry_type,
authors: Vec::new(),
editors: Vec::new(),
fields: BTreeMap::new(),
pdf: None,
created_at: now,
modified_at: now,
}
}
/// Returns the value of the `title` field, if present.
pub fn title(&self) -> Option<&str> {
self.fields.get("title").map(String::as_str)
}
/// Returns the value of the `year` field, if present.
pub fn year(&self) -> Option<&str> {
self.fields.get("year").map(String::as_str)
}
}
#[cfg(test)]
mod tests {
    use super::*;
    // "Family, Given" when only family and given names are set.
    #[test]
    fn person_bibtex_full() {
        let p = Person {
            family: "Dijkstra".into(),
            given: Some("Edsger W.".into()),
            prefix: None,
            suffix: None,
        };
        assert_eq!(p.to_bibtex(), "Dijkstra, Edsger W.");
    }
    // The prefix ("von" part) goes before the family name, unseparated by comma.
    #[test]
    fn person_bibtex_with_prefix() {
        let p = Person {
            family: "Beethoven".into(),
            given: Some("Ludwig".into()),
            prefix: Some("van".into()),
            suffix: None,
        };
        assert_eq!(p.to_bibtex(), "van Beethoven, Ludwig");
    }
    // The suffix occupies the middle slot: "Family, Suffix, Given".
    #[test]
    fn person_bibtex_with_suffix() {
        let p = Person {
            family: "King".into(),
            given: Some("Martin Luther".into()),
            prefix: None,
            suffix: Some("Jr.".into()),
        };
        assert_eq!(p.to_bibtex(), "King, Jr., Martin Luther");
    }
    // A bare family name needs no commas at all.
    #[test]
    fn person_bibtex_family_only() {
        let p = Person::new("Aristotle");
        assert_eq!(p.to_bibtex(), "Aristotle");
    }
    // A reference with authors and fields must survive TOML encode/decode.
    #[test]
    fn reference_serde_round_trip() {
        let mut r = Reference::new("doe2024", EntryType::Article);
        r.authors.push(Person {
            family: "Doe".into(),
            given: Some("Jane".into()),
            prefix: None,
            suffix: None,
        });
        r.fields.insert("title".into(), "A Great Paper".into());
        r.fields.insert("year".into(), "2024".into());
        let toml_str = toml::to_string(&r).expect("serialize to TOML");
        let r2: Reference = toml::from_str(&toml_str).expect("deserialize from TOML");
        assert_eq!(r.id, r2.id);
        assert_eq!(r.cite_key, r2.cite_key);
        assert_eq!(r.authors, r2.authors);
        assert_eq!(r.fields, r2.fields);
    }
    // Spot-check the enum -> `.bib` type-name mapping.
    #[test]
    fn entry_type_bibtex_names() {
        assert_eq!(EntryType::Article.bibtex_name(), "article");
        assert_eq!(EntryType::InProceedings.bibtex_name(), "inproceedings");
        assert_eq!(EntryType::PhdThesis.bibtex_name(), "phdthesis");
    }
}

View File

@@ -0,0 +1,12 @@
use chrono::{DateTime, Utc};
use serde::Serialize;
/// Metadata about a stored snapshot (git commit).
/// Not serialized to files — read directly from git history.
#[derive(Debug, Clone, PartialEq, Eq, Serialize)]
pub struct Snapshot {
    /// Git commit SHA (hex string).
    pub id: String,
    /// Message supplied when the snapshot was created.
    pub message: String,
    /// Snapshot creation time (UTC).
    pub timestamp: DateTime<Utc>,
}

View File

@@ -0,0 +1,448 @@
use crate::error::{EntityType, StoreError};
use crate::model::{AnnotationSet, Library, LibraryId, Reference, ReferenceId, Snapshot};
use crate::store::Store;
use chrono::{DateTime, TimeZone, Utc};
use git2::{IndexAddOption, Repository, Signature};
use std::path::{Path, PathBuf};
/// Subdirectory of the repo root holding one TOML file per reference.
const REFERENCES_DIR: &str = "references";
/// Subdirectory holding one TOML file per library.
const LIBRARIES_DIR: &str = "libraries";
/// Subdirectory holding one TOML annotation file per reference.
const ANNOTATIONS_DIR: &str = "annotations";
/// Subdirectory holding attached PDF files.
const PDFS_DIR: &str = "pdfs";
/// Filesystem + git-backed store. Each entity is a TOML file.
/// Snapshots are git commits; time travel is git checkout.
pub struct FsStore {
    /// Repository root directory (also the git work tree).
    root: PathBuf,
    /// Handle to the git repository at `root`.
    repo: Repository,
}
impl FsStore {
    /// Create a new Brittle repository at the given path.
    /// Fails if the path already contains a git repository.
    ///
    /// Initializes the directory layout (`references/`, `libraries/`,
    /// `annotations/`, `pdfs/`) and makes an initial commit so the
    /// repository has a HEAD.
    pub fn create(path: &Path) -> Result<Self, StoreError> {
        if path.join(".git").exists() {
            return Err(StoreError::RepoAlreadyExists {
                path: path.to_owned(),
            });
        }
        let repo = Repository::init(path).map_err(StoreError::Git)?;
        // Create subdirectories.
        for dir in [REFERENCES_DIR, LIBRARIES_DIR, ANNOTATIONS_DIR, PDFS_DIR] {
            std::fs::create_dir_all(path.join(dir))?;
        }
        let mut store = Self {
            root: path.to_owned(),
            repo,
        };
        // Create the initial commit so the repo has a HEAD.
        store.commit_all("Initialize Brittle repository")?;
        Ok(store)
    }
    /// Open an existing Brittle repository.
    ///
    /// NOTE(review): every `Repository::open` failure is collapsed into
    /// `RepoNotFound`, which also hides e.g. permission or corruption
    /// errors — consider mapping only not-found errors and surfacing the
    /// rest as `StoreError::Git`.
    pub fn open(path: &Path) -> Result<Self, StoreError> {
        let repo = Repository::open(path).map_err(|_| StoreError::RepoNotFound {
            path: path.to_owned(),
        })?;
        Ok(Self {
            root: path.to_owned(),
            repo,
        })
    }
    /// Stage all changes and create a git commit. Returns the commit OID as hex.
    ///
    /// Stages everything under the work tree (`add_all "*"`), writes the
    /// index and its tree, then commits with a fixed "Brittle" signature.
    fn commit_all(&mut self, message: &str) -> Result<String, StoreError> {
        let mut index = self.repo.index().map_err(StoreError::Git)?;
        index
            .add_all(["*"].iter(), IndexAddOption::DEFAULT, None)
            .map_err(StoreError::Git)?;
        index.write().map_err(StoreError::Git)?;
        let tree_oid = index.write_tree().map_err(StoreError::Git)?;
        let tree = self.repo.find_tree(tree_oid).map_err(StoreError::Git)?;
        let sig = Signature::now("Brittle", "brittle@local").map_err(StoreError::Git)?;
        // HEAD is absent before the very first commit; treat that as "no parent".
        let parent_commit = self.repo.head().ok().and_then(|h| h.peel_to_commit().ok());
        let oid = match &parent_commit {
            Some(parent) => self
                .repo
                .commit(Some("HEAD"), &sig, &sig, message, &tree, &[parent])
                .map_err(StoreError::Git)?,
            None => self
                .repo
                .commit(Some("HEAD"), &sig, &sig, message, &tree, &[])
                .map_err(StoreError::Git)?,
        };
        Ok(oid.to_string())
    }
    /// Path of the TOML file backing a reference: `references/<id>.toml`.
    fn reference_path(&self, id: ReferenceId) -> PathBuf {
        self.root.join(REFERENCES_DIR).join(format!("{id}.toml"))
    }
    /// Path of the TOML file backing a library: `libraries/<id>.toml`.
    fn library_path(&self, id: LibraryId) -> PathBuf {
        self.root.join(LIBRARIES_DIR).join(format!("{id}.toml"))
    }
    /// Path of the TOML file holding all annotations for one reference.
    fn annotation_path(&self, ref_id: ReferenceId) -> PathBuf {
        self.root
            .join(ANNOTATIONS_DIR)
            .join(format!("{ref_id}.toml"))
    }
    /// Directory where attached PDF files are stored.
    pub fn pdf_dir(&self) -> PathBuf {
        self.root.join(PDFS_DIR)
    }
    /// Returns the repository root directory.
    pub fn root(&self) -> &Path {
        &self.root
    }
    /// Serialize `value` as TOML and write it to `path`, overwriting.
    fn write_toml<T: serde::Serialize>(&self, path: &Path, value: &T) -> Result<(), StoreError> {
        let content = toml::to_string(value).map_err(|e| StoreError::Serialization {
            message: e.to_string(),
        })?;
        std::fs::write(path, content)?;
        Ok(())
    }
    /// Read `path` and deserialize its TOML contents into `T`.
    fn read_toml<T: serde::de::DeserializeOwned>(&self, path: &Path) -> Result<T, StoreError> {
        let content = std::fs::read_to_string(path)?;
        toml::from_str(&content).map_err(|e| StoreError::Deserialization {
            path: path.to_owned(),
            message: e.to_string(),
        })
    }
    /// Collect the ids of all `*.toml` files in `dir`, using `parse` to turn
    /// each file stem into an id. Files whose stems don't parse are skipped.
    fn ids_from_dir<T, F>(&self, dir: &str, parse: F) -> Result<Vec<T>, StoreError>
    where
        F: Fn(&str) -> Option<T>,
    {
        let dir_path = self.root.join(dir);
        let mut ids = Vec::new();
        for entry in std::fs::read_dir(&dir_path)? {
            let entry = entry?;
            let name = entry.file_name();
            let name = name.to_string_lossy();
            // let-chain (edition 2024): keep only stems that parse as ids.
            if let Some(stem) = name.strip_suffix(".toml")
                && let Some(id) = parse(stem)
            {
                ids.push(id);
            }
        }
        Ok(ids)
    }
}
impl Store for FsStore {
    /// Write (create or overwrite) the reference's TOML file.
    fn save_reference(&mut self, reference: &Reference) -> Result<(), StoreError> {
        self.write_toml(&self.reference_path(reference.id), reference)
    }
    /// Read a reference's TOML file; `NotFound` if the file is absent.
    fn load_reference(&self, id: ReferenceId) -> Result<Reference, StoreError> {
        let path = self.reference_path(id);
        if !path.exists() {
            return Err(StoreError::NotFound {
                entity_type: EntityType::Reference,
                id: id.to_string(),
            });
        }
        self.read_toml(&path)
    }
    /// Remove a reference's TOML file; `NotFound` if the file is absent.
    fn delete_reference(&mut self, id: ReferenceId) -> Result<(), StoreError> {
        let path = self.reference_path(id);
        if !path.exists() {
            return Err(StoreError::NotFound {
                entity_type: EntityType::Reference,
                id: id.to_string(),
            });
        }
        std::fs::remove_file(path)?;
        Ok(())
    }
    /// Ids of all `references/*.toml` files whose stems parse as UUIDs.
    fn list_reference_ids(&self) -> Result<Vec<ReferenceId>, StoreError> {
        self.ids_from_dir(REFERENCES_DIR, |s| {
            s.parse::<uuid::Uuid>().ok().map(ReferenceId::from)
        })
    }
    /// Write (create or overwrite) the library's TOML file.
    fn save_library(&mut self, library: &Library) -> Result<(), StoreError> {
        self.write_toml(&self.library_path(library.id), library)
    }
    /// Read a library's TOML file; `NotFound` if the file is absent.
    fn load_library(&self, id: LibraryId) -> Result<Library, StoreError> {
        let path = self.library_path(id);
        if !path.exists() {
            return Err(StoreError::NotFound {
                entity_type: EntityType::Library,
                id: id.to_string(),
            });
        }
        self.read_toml(&path)
    }
    /// Remove a library's TOML file; `NotFound` if the file is absent.
    fn delete_library(&mut self, id: LibraryId) -> Result<(), StoreError> {
        let path = self.library_path(id);
        if !path.exists() {
            return Err(StoreError::NotFound {
                entity_type: EntityType::Library,
                id: id.to_string(),
            });
        }
        std::fs::remove_file(path)?;
        Ok(())
    }
    /// Ids of all `libraries/*.toml` files whose stems parse as UUIDs.
    fn list_library_ids(&self) -> Result<Vec<LibraryId>, StoreError> {
        self.ids_from_dir(LIBRARIES_DIR, |s| {
            s.parse::<uuid::Uuid>().ok().map(LibraryId::from)
        })
    }
    /// A missing annotation file means "no annotations yet", not an error.
    fn load_annotations(&self, ref_id: ReferenceId) -> Result<AnnotationSet, StoreError> {
        let path = self.annotation_path(ref_id);
        if !path.exists() {
            return Ok(AnnotationSet::new(ref_id));
        }
        self.read_toml(&path)
    }
    /// Write (create or overwrite) the annotation file for a reference.
    fn save_annotations(&mut self, set: &AnnotationSet) -> Result<(), StoreError> {
        self.write_toml(&self.annotation_path(set.reference_id), set)
    }
    /// Remove the annotation file if present; a missing file is not an error.
    fn delete_annotations(&mut self, ref_id: ReferenceId) -> Result<(), StoreError> {
        let path = self.annotation_path(ref_id);
        if path.exists() {
            std::fs::remove_file(path)?;
        }
        Ok(())
    }
    /// Commit everything in the work tree; the commit itself is the snapshot.
    fn create_snapshot(&mut self, message: &str) -> Result<Snapshot, StoreError> {
        let oid = self.commit_all(message)?;
        // Re-read the commit so the returned timestamp matches git's record.
        let commit = self
            .repo
            .find_commit(git2::Oid::from_str(&oid).map_err(StoreError::Git)?)
            .map_err(StoreError::Git)?;
        let timestamp = commit_timestamp(&commit)?;
        Ok(Snapshot {
            id: oid,
            message: message.to_owned(),
            timestamp,
        })
    }
    /// Walk history from HEAD sorted by commit time (newest first).
    fn list_snapshots(&self) -> Result<Vec<Snapshot>, StoreError> {
        let mut revwalk = self.repo.revwalk().map_err(StoreError::Git)?;
        revwalk.push_head().map_err(StoreError::Git)?;
        revwalk
            .set_sorting(git2::Sort::TIME)
            .map_err(StoreError::Git)?;
        let mut snapshots = Vec::new();
        for oid in revwalk {
            let oid = oid.map_err(StoreError::Git)?;
            let commit = self.repo.find_commit(oid).map_err(StoreError::Git)?;
            // `message()` is None for non-UTF-8 messages; fall back to "".
            let message = commit.message().unwrap_or("").to_owned();
            let timestamp = commit_timestamp(&commit)?;
            snapshots.push(Snapshot {
                id: oid.to_string(),
                message,
                timestamp,
            });
        }
        Ok(snapshots)
    }
    /// Check out the snapshot's tree and detach HEAD at that commit.
    /// Both a malformed hex id and an unknown commit map to `NotFound`.
    fn restore_snapshot(&mut self, snapshot_id: &str) -> Result<(), StoreError> {
        let oid = git2::Oid::from_str(snapshot_id).map_err(|_| StoreError::NotFound {
            entity_type: EntityType::Snapshot,
            id: snapshot_id.to_owned(),
        })?;
        let commit = self
            .repo
            .find_commit(oid)
            .map_err(|_| StoreError::NotFound {
                entity_type: EntityType::Snapshot,
                id: snapshot_id.to_owned(),
            })?;
        let tree = commit.tree().map_err(StoreError::Git)?;
        // Checkout the tree, updating both the index and the working directory.
        // `force` overwrites modified tracked files; `remove_untracked` removes
        // files that were written since the last snapshot but never committed.
        let mut checkout_opts = git2::build::CheckoutBuilder::new();
        checkout_opts.force().remove_untracked(true);
        self.repo
            .checkout_tree(tree.as_object(), Some(&mut checkout_opts))
            .map_err(StoreError::Git)?;
        // Move HEAD to point at the restored commit.
        self.repo.set_head_detached(oid).map_err(StoreError::Git)?;
        Ok(())
    }
    /// True if git status reports anything, including untracked files.
    fn has_uncommitted_changes(&self) -> Result<bool, StoreError> {
        let statuses = self
            .repo
            .statuses(Some(
                git2::StatusOptions::new()
                    .include_untracked(true)
                    .recurse_untracked_dirs(true),
            ))
            .map_err(StoreError::Git)?;
        Ok(!statuses.is_empty())
    }
}
/// Convert a git commit's recorded time (Unix seconds) into a UTC `DateTime`.
///
/// An out-of-range timestamp is reported as a `Serialization` error.
fn commit_timestamp(commit: &git2::Commit<'_>) -> Result<DateTime<Utc>, StoreError> {
    let seconds = commit.time().seconds();
    match Utc.timestamp_opt(seconds, 0).single() {
        Some(ts) => Ok(ts),
        None => Err(StoreError::Serialization {
            message: "invalid commit timestamp".into(),
        }),
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::model::{EntryType, Library, Reference};
    /// Convenience: a fresh FsStore rooted in the given directory.
    fn make_store(dir: &Path) -> FsStore {
        FsStore::create(dir).expect("create store")
    }
    // A created repository can be re-opened from disk.
    #[test]
    fn create_and_open() {
        let tmp = tempfile::tempdir().unwrap();
        let store = make_store(tmp.path());
        // Release the handle, then re-open the same repository from disk.
        drop(store);
        FsStore::open(tmp.path()).expect("re-open store");
    }
    // `create` refuses to run on a path that is already a git repository.
    #[test]
    fn create_fails_if_repo_exists() {
        let tmp = tempfile::tempdir().unwrap();
        make_store(tmp.path());
        assert!(FsStore::create(tmp.path()).is_err());
    }
    // Full reference lifecycle: save, load, delete, load-after-delete errors.
    #[test]
    fn save_load_delete_reference() {
        let tmp = tempfile::tempdir().unwrap();
        let mut store = make_store(tmp.path());
        let r = Reference::new("test2024", EntryType::Article);
        let id = r.id;
        store.save_reference(&r).unwrap();
        let loaded = store.load_reference(id).unwrap();
        assert_eq!(loaded.cite_key, "test2024");
        store.delete_reference(id).unwrap();
        assert!(store.load_reference(id).is_err());
    }
    // Listing picks up every saved reference file.
    #[test]
    fn list_reference_ids() {
        let tmp = tempfile::tempdir().unwrap();
        let mut store = make_store(tmp.path());
        let r1 = Reference::new("a2024", EntryType::Article);
        let r2 = Reference::new("b2024", EntryType::Book);
        store.save_reference(&r1).unwrap();
        store.save_reference(&r2).unwrap();
        let ids = store.list_reference_ids().unwrap();
        assert_eq!(ids.len(), 2);
    }
    // Library round-trip through the TOML file on disk.
    #[test]
    fn save_load_library() {
        let tmp = tempfile::tempdir().unwrap();
        let mut store = make_store(tmp.path());
        let lib = Library::new("ML Papers", None);
        let id = lib.id;
        store.save_library(&lib).unwrap();
        let loaded = store.load_library(id).unwrap();
        assert_eq!(loaded.name, "ML Papers");
    }
    // Loading annotations for a reference with no file yields an empty set.
    #[test]
    fn annotations_missing_returns_empty_set() {
        let tmp = tempfile::tempdir().unwrap();
        let store = make_store(tmp.path());
        let ref_id = ReferenceId::new();
        let set = store.load_annotations(ref_id).unwrap();
        assert!(set.annotations.is_empty());
    }
    // A snapshot commit appears in the listed history with its message.
    #[test]
    fn create_and_list_snapshot() {
        let tmp = tempfile::tempdir().unwrap();
        let mut store = make_store(tmp.path());
        // Save something so there's content to commit beyond the initial commit.
        let r = Reference::new("snap2024", EntryType::Misc);
        store.save_reference(&r).unwrap();
        let snap = store.create_snapshot("my first snapshot").unwrap();
        let snapshots = store.list_snapshots().unwrap();
        assert!(snapshots.iter().any(|s| s.id == snap.id));
        assert!(snapshots.iter().any(|s| s.message == "my first snapshot"));
    }
    // Restoring a snapshot removes files written after it was taken.
    #[test]
    fn restore_snapshot_reverts_state() {
        let tmp = tempfile::tempdir().unwrap();
        let mut store = make_store(tmp.path());
        let r = Reference::new("before2024", EntryType::Article);
        let ref_id = r.id;
        store.save_reference(&r).unwrap();
        let snap = store.create_snapshot("baseline").unwrap();
        // Modify state: add another reference.
        let r2 = Reference::new("after2024", EntryType::Book);
        store.save_reference(&r2).unwrap();
        assert_eq!(store.list_reference_ids().unwrap().len(), 2);
        // Restore to baseline — should have only 1 reference.
        store.restore_snapshot(&snap.id).unwrap();
        let store2 = FsStore::open(tmp.path()).unwrap();
        assert_eq!(store2.list_reference_ids().unwrap().len(), 1);
        assert!(store2.load_reference(ref_id).is_ok());
    }
    // A freshly committed repo is clean; writing a file dirties it.
    #[test]
    fn has_uncommitted_changes_detects_new_files() {
        let tmp = tempfile::tempdir().unwrap();
        let mut store = make_store(tmp.path());
        assert!(!store.has_uncommitted_changes().unwrap());
        let r = Reference::new("new2024", EntryType::Misc);
        store.save_reference(&r).unwrap();
        assert!(store.has_uncommitted_changes().unwrap());
    }
}

View File

@@ -0,0 +1,302 @@
use crate::error::{EntityType, StoreError};
use crate::model::{AnnotationSet, Library, LibraryId, Reference, ReferenceId, Snapshot};
use crate::store::Store;
use chrono::Utc;
use std::collections::HashMap;
/// In-memory store for testing. Not suitable for production use.
#[derive(Debug, Default)]
pub struct MemoryStore {
    /// Live reference state, keyed by id.
    references: HashMap<ReferenceId, Reference>,
    /// Live library state, keyed by id.
    libraries: HashMap<LibraryId, Library>,
    /// Live annotation sets, keyed by the owning reference's id.
    annotations: HashMap<ReferenceId, AnnotationSet>,
    /// Checkpoints for snapshot simulation: (id, message, cloned state).
    snapshots: Vec<(String, String, Box<MemorySnapshot>)>,
    /// Monotonic counter used to mint snapshot ids ("mem-snapshot-NNNN").
    next_snapshot_idx: usize,
}
/// A deep copy of the store's entire state at snapshot time.
#[derive(Debug)]
struct MemorySnapshot {
    references: HashMap<ReferenceId, Reference>,
    libraries: HashMap<LibraryId, Library>,
    annotations: HashMap<ReferenceId, AnnotationSet>,
}
impl MemoryStore {
pub fn new() -> Self {
Self::default()
}
}
impl Store for MemoryStore {
    /// Insert or overwrite the reference in the map.
    fn save_reference(&mut self, reference: &Reference) -> Result<(), StoreError> {
        self.references.insert(reference.id, reference.clone());
        Ok(())
    }
    /// Clone the stored reference; `NotFound` if absent.
    fn load_reference(&self, id: ReferenceId) -> Result<Reference, StoreError> {
        self.references
            .get(&id)
            .cloned()
            .ok_or_else(|| StoreError::NotFound {
                entity_type: EntityType::Reference,
                id: id.to_string(),
            })
    }
    /// Remove the reference; `NotFound` if absent.
    fn delete_reference(&mut self, id: ReferenceId) -> Result<(), StoreError> {
        self.references
            .remove(&id)
            .ok_or_else(|| StoreError::NotFound {
                entity_type: EntityType::Reference,
                id: id.to_string(),
            })?;
        Ok(())
    }
    /// All reference ids, in arbitrary HashMap order.
    fn list_reference_ids(&self) -> Result<Vec<ReferenceId>, StoreError> {
        Ok(self.references.keys().copied().collect())
    }
    /// Insert or overwrite the library in the map.
    fn save_library(&mut self, library: &Library) -> Result<(), StoreError> {
        self.libraries.insert(library.id, library.clone());
        Ok(())
    }
    /// Clone the stored library; `NotFound` if absent.
    fn load_library(&self, id: LibraryId) -> Result<Library, StoreError> {
        self.libraries
            .get(&id)
            .cloned()
            .ok_or_else(|| StoreError::NotFound {
                entity_type: EntityType::Library,
                id: id.to_string(),
            })
    }
    /// Remove the library; `NotFound` if absent.
    fn delete_library(&mut self, id: LibraryId) -> Result<(), StoreError> {
        self.libraries
            .remove(&id)
            .ok_or_else(|| StoreError::NotFound {
                entity_type: EntityType::Library,
                id: id.to_string(),
            })?;
        Ok(())
    }
    /// All library ids, in arbitrary HashMap order.
    fn list_library_ids(&self) -> Result<Vec<LibraryId>, StoreError> {
        Ok(self.libraries.keys().copied().collect())
    }
    /// A missing entry means "no annotations yet", never an error.
    fn load_annotations(&self, ref_id: ReferenceId) -> Result<AnnotationSet, StoreError> {
        Ok(self
            .annotations
            .get(&ref_id)
            .cloned()
            .unwrap_or_else(|| AnnotationSet::new(ref_id)))
    }
    /// Insert or overwrite the annotation set for its reference.
    fn save_annotations(&mut self, set: &AnnotationSet) -> Result<(), StoreError> {
        self.annotations.insert(set.reference_id, set.clone());
        Ok(())
    }
    /// Remove the annotation set; deleting a missing one is not an error.
    fn delete_annotations(&mut self, ref_id: ReferenceId) -> Result<(), StoreError> {
        self.annotations.remove(&ref_id);
        Ok(())
    }
    /// Deep-clone the current state and remember it under a sequential id.
    fn create_snapshot(&mut self, message: &str) -> Result<Snapshot, StoreError> {
        let id = format!("mem-snapshot-{:04}", self.next_snapshot_idx);
        self.next_snapshot_idx += 1;
        let snapshot_data = Box::new(MemorySnapshot {
            references: self.references.clone(),
            libraries: self.libraries.clone(),
            annotations: self.annotations.clone(),
        });
        let timestamp = Utc::now();
        self.snapshots
            .push((id.clone(), message.to_owned(), snapshot_data));
        Ok(Snapshot {
            id,
            message: message.to_owned(),
            timestamp,
        })
    }
    /// Most recent snapshot first (reverse of insertion order).
    ///
    /// NOTE(review): the creation time is not kept in `snapshots`, so each
    /// call fabricates `Utc::now()` — listed timestamps are not the real
    /// creation times and differ between calls. Storing the timestamp in
    /// the checkpoint tuple would fix this.
    fn list_snapshots(&self) -> Result<Vec<Snapshot>, StoreError> {
        let snapshots = self
            .snapshots
            .iter()
            .rev()
            .map(|(id, message, _)| Snapshot {
                id: id.clone(),
                message: message.clone(),
                timestamp: Utc::now(), // timestamps not stored in MemoryStore
            })
            .collect();
        Ok(snapshots)
    }
    /// Replace live state with a deep copy of the matching checkpoint.
    fn restore_snapshot(&mut self, snapshot_id: &str) -> Result<(), StoreError> {
        let snapshot = self
            .snapshots
            .iter()
            .find(|(id, _, _)| id == snapshot_id)
            .ok_or_else(|| StoreError::NotFound {
                entity_type: EntityType::Snapshot,
                id: snapshot_id.to_owned(),
            })?;
        self.references = snapshot.2.references.clone();
        self.libraries = snapshot.2.libraries.clone();
        self.annotations = snapshot.2.annotations.clone();
        Ok(())
    }
    fn has_uncommitted_changes(&self) -> Result<bool, StoreError> {
        // MemoryStore has no concept of uncommitted changes.
        Ok(false)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::model::{AnnotationType, EntryType, Library, Reference, TextMarkupType};
    /// Fixture: a minimal article reference.
    fn make_reference() -> Reference {
        Reference::new("test2024", EntryType::Article)
    }
    /// Fixture: a top-level (parentless) library.
    fn make_library() -> Library {
        Library::new("Test Library", None)
    }
    #[test]
    fn save_and_load_reference() {
        let mut store = MemoryStore::new();
        let r = make_reference();
        let id = r.id;
        store.save_reference(&r).unwrap();
        let r2 = store.load_reference(id).unwrap();
        assert_eq!(r.cite_key, r2.cite_key);
    }
    // Loading an unknown id yields the typed NotFound error.
    #[test]
    fn load_missing_reference_returns_error() {
        let store = MemoryStore::new();
        let id = ReferenceId::new();
        let err = store.load_reference(id).unwrap_err();
        assert!(matches!(
            err,
            StoreError::NotFound {
                entity_type: EntityType::Reference,
                ..
            }
        ));
    }
    #[test]
    fn delete_reference() {
        let mut store = MemoryStore::new();
        let r = make_reference();
        let id = r.id;
        store.save_reference(&r).unwrap();
        store.delete_reference(id).unwrap();
        assert!(store.load_reference(id).is_err());
    }
    // Listing returns every saved id (order unspecified).
    #[test]
    fn list_reference_ids() {
        let mut store = MemoryStore::new();
        let r1 = make_reference();
        let r2 = make_reference();
        store.save_reference(&r1).unwrap();
        store.save_reference(&r2).unwrap();
        let ids = store.list_reference_ids().unwrap();
        assert_eq!(ids.len(), 2);
        assert!(ids.contains(&r1.id));
        assert!(ids.contains(&r2.id));
    }
    #[test]
    fn save_and_load_library() {
        let mut store = MemoryStore::new();
        let lib = make_library();
        let id = lib.id;
        store.save_library(&lib).unwrap();
        let lib2 = store.load_library(id).unwrap();
        assert_eq!(lib.name, lib2.name);
    }
    #[test]
    fn delete_library() {
        let mut store = MemoryStore::new();
        let lib = make_library();
        let id = lib.id;
        store.save_library(&lib).unwrap();
        store.delete_library(id).unwrap();
        assert!(store.load_library(id).is_err());
    }
    // Annotations for an unknown reference come back as an empty set, not an error.
    #[test]
    fn annotations_default_to_empty_set() {
        let store = MemoryStore::new();
        let ref_id = ReferenceId::new();
        let set = store.load_annotations(ref_id).unwrap();
        assert_eq!(set.reference_id, ref_id);
        assert!(set.annotations.is_empty());
    }
    #[test]
    fn save_and_load_annotations() {
        use crate::model::{Annotation, Color};
        let mut store = MemoryStore::new();
        let ref_id = ReferenceId::new();
        let ann = Annotation::new(
            ref_id,
            0,
            AnnotationType::TextMarkup {
                markup_type: TextMarkupType::Highlight,
                quads: vec![],
                color: Color::YELLOW,
                selected_text: None,
            },
        );
        let set = AnnotationSet {
            reference_id: ref_id,
            annotations: vec![ann],
        };
        store.save_annotations(&set).unwrap();
        let set2 = store.load_annotations(ref_id).unwrap();
        assert_eq!(set2.annotations.len(), 1);
    }
    // Restoring a snapshot brings back entities deleted after it was taken.
    #[test]
    fn snapshot_create_and_restore() {
        let mut store = MemoryStore::new();
        let r = make_reference();
        let ref_id = r.id;
        store.save_reference(&r).unwrap();
        let snap = store.create_snapshot("first snapshot").unwrap();
        // Modify state after snapshot.
        store.delete_reference(ref_id).unwrap();
        assert!(store.load_reference(ref_id).is_err());
        // Restore snapshot.
        store.restore_snapshot(&snap.id).unwrap();
        assert!(store.load_reference(ref_id).is_ok());
    }
    #[test]
    fn list_snapshots_in_reverse_order() {
        let mut store = MemoryStore::new();
        store.create_snapshot("first").unwrap();
        store.create_snapshot("second").unwrap();
        let snaps = store.list_snapshots().unwrap();
        assert_eq!(snaps.len(), 2);
        assert_eq!(snaps[0].message, "second"); // most recent first
    }
}

View File

@@ -0,0 +1,44 @@
pub mod fs;
pub mod memory;
use crate::error::StoreError;
use crate::model::{AnnotationSet, Library, LibraryId, Reference, ReferenceId, Snapshot};
/// Abstraction over the storage backend.
///
/// The git-backed filesystem (`FsStore`) is the production implementation.
/// An in-memory implementation (`MemoryStore`) exists for testing.
pub trait Store {
    // ---- References ----
    /// Persist a reference (create or overwrite).
    fn save_reference(&mut self, reference: &Reference) -> Result<(), StoreError>;
    /// Load a reference by id; `NotFound` if it does not exist.
    fn load_reference(&self, id: ReferenceId) -> Result<Reference, StoreError>;
    /// Delete a reference by id; `NotFound` if it does not exist.
    fn delete_reference(&mut self, id: ReferenceId) -> Result<(), StoreError>;
    /// List the ids of all stored references (order unspecified).
    fn list_reference_ids(&self) -> Result<Vec<ReferenceId>, StoreError>;
    // ---- Libraries ----
    /// Persist a library (create or overwrite).
    fn save_library(&mut self, library: &Library) -> Result<(), StoreError>;
    /// Load a library by id; `NotFound` if it does not exist.
    fn load_library(&self, id: LibraryId) -> Result<Library, StoreError>;
    /// Delete a library by id; `NotFound` if it does not exist.
    fn delete_library(&mut self, id: LibraryId) -> Result<(), StoreError>;
    /// List the ids of all stored libraries (order unspecified).
    fn list_library_ids(&self) -> Result<Vec<LibraryId>, StoreError>;
    // ---- Annotations ----
    /// Load the annotation set for a reference. Returns an empty set if none exists.
    fn load_annotations(&self, ref_id: ReferenceId) -> Result<AnnotationSet, StoreError>;
    /// Persist the full annotation set for a reference (create or overwrite).
    fn save_annotations(&mut self, set: &AnnotationSet) -> Result<(), StoreError>;
    /// Delete the annotation set for a reference; deleting nothing is not an error.
    fn delete_annotations(&mut self, ref_id: ReferenceId) -> Result<(), StoreError>;
    // ---- Snapshots ----
    /// Record the current state as a snapshot and return its metadata.
    fn create_snapshot(&mut self, message: &str) -> Result<Snapshot, StoreError>;
    /// List all snapshots, most recent first.
    fn list_snapshots(&self) -> Result<Vec<Snapshot>, StoreError>;
    /// Restore to a previous snapshot. Caller must ensure no uncommitted changes exist.
    fn restore_snapshot(&mut self, snapshot_id: &str) -> Result<(), StoreError>;
    /// Whether state has changed since the last snapshot.
    fn has_uncommitted_changes(&self) -> Result<bool, StoreError>;
}
// Re-export concrete types for convenience.
pub use fs::FsStore;
pub use memory::MemoryStore;

View File

@@ -0,0 +1,163 @@
/// End-to-end integration test using a real Brittle<FsStore> repository.
///
/// Exercises the full workflow: create repo, add references with authors,
/// organize in libraries, export BibTeX, create a snapshot, modify state,
/// restore the snapshot, and verify everything reverted correctly.
use brittle_core::{
AnnotationType, Brittle, BrittleError, Color, EntryType, Person, TextMarkupType,
ValidationError,
};
#[test]
fn full_workflow() {
    let tmp = tempfile::tempdir().unwrap();
    let mut db = Brittle::create(tmp.path()).unwrap();
    // ---- Create references ----
    let mut turing = db
        .create_reference("turing1950", EntryType::Article)
        .unwrap();
    turing.authors.push(Person {
        family: "Turing".into(),
        given: Some("Alan M.".into()),
        prefix: None,
        suffix: None,
    });
    turing.fields.insert(
        "title".into(),
        "Computing Machinery and Intelligence".into(),
    );
    turing.fields.insert("journal".into(), "Mind".into());
    turing.fields.insert("year".into(), "1950".into());
    // Rebind to the value returned by update so later reads see saved state.
    let turing = db.update_reference(turing).unwrap();
    let mut knuth = db.create_reference("knuth1984", EntryType::Book).unwrap();
    knuth.authors.push(Person::new("Knuth"));
    knuth.fields.insert("title".into(), "The TeXbook".into());
    knuth
        .fields
        .insert("publisher".into(), "Addison-Wesley".into());
    knuth.fields.insert("year".into(), "1984".into());
    let knuth = db.update_reference(knuth).unwrap();
    // ---- Organize in libraries ----
    // "AI" is nested under "Computer Science" via parent_id.
    let cs = db.create_library("Computer Science", None).unwrap();
    let ai = db.create_library("AI", Some(cs.id)).unwrap();
    db.add_to_library(cs.id, turing.id).unwrap();
    db.add_to_library(ai.id, turing.id).unwrap(); // multi-membership
    db.add_to_library(cs.id, knuth.id).unwrap();
    // Both references are in CS.
    let cs_refs = db.list_library_references(cs.id).unwrap();
    assert_eq!(cs_refs.len(), 2);
    // Turing is in both CS and AI.
    let turing_libs = db.list_reference_libraries(turing.id).unwrap();
    assert_eq!(turing_libs.len(), 2);
    // ---- BibTeX export ----
    let (bibtex, errors) = db.export_library_bibtex(cs.id).unwrap();
    assert!(errors.is_empty(), "unexpected BibTeX errors: {errors:?}");
    assert!(bibtex.contains("@article{turing1950,"));
    assert!(bibtex.contains("Turing, Alan M."));
    assert!(bibtex.contains("Computing Machinery and Intelligence"));
    assert!(bibtex.contains("@book{knuth1984,"));
    // ---- Annotations ----
    let ann = db
        .create_annotation(
            turing.id,
            0,
            AnnotationType::TextMarkup {
                markup_type: TextMarkupType::Highlight,
                quads: vec![],
                color: Color::YELLOW,
                selected_text: Some("The Imitation Game".into()),
            },
            Some("Key concept".into()),
        )
        .unwrap();
    let annotations = db.get_annotations(turing.id).unwrap();
    assert_eq!(annotations.len(), 1);
    assert_eq!(annotations[0].content.as_deref(), Some("Key concept"));
    // ---- Snapshot ----
    let snap = db
        .create_snapshot("Baseline with Turing and Knuth")
        .unwrap();
    assert!(!snap.id.is_empty());
    let snapshots = db.list_snapshots().unwrap();
    // At least our named snapshot + the initial "Initialize Brittle repository" commit.
    assert!(snapshots.len() >= 2);
    assert!(
        snapshots
            .iter()
            .any(|s| s.message == "Baseline with Turing and Knuth")
    );
    // ---- Modify state after snapshot ----
    db.delete_reference(knuth.id).unwrap();
    assert!(db.get_reference(knuth.id).is_err());
    let cs_refs_after = db.list_library_references(cs.id).unwrap();
    assert_eq!(
        cs_refs_after.len(),
        1,
        "Knuth should have been removed from library"
    );
    // ---- Restore snapshot ----
    // The knuth deletion is written to disk but not committed — verify this.
    assert!(db.has_uncommitted_changes().unwrap());
    // restore_snapshot errors on uncommitted changes; use discard_changes instead.
    db.discard_changes().unwrap();
    // After restore, Knuth should be back.
    let knuth_restored = db.get_reference(knuth.id).unwrap();
    assert_eq!(knuth_restored.cite_key, "knuth1984");
    // CS library should have 2 members again.
    let cs_refs_restored = db.list_library_references(cs.id).unwrap();
    assert_eq!(cs_refs_restored.len(), 2);
    // Inspect git log to verify history is human-readable.
    let snapshots_after = db.list_snapshots().unwrap();
    assert!(!snapshots_after.is_empty());
}
/// Asking for a PDF path on a reference with no attachment must fail
/// with the `NoPdfAttached` validation error.
#[test]
fn get_pdf_path_returns_error_when_no_pdf_attached() {
    let tmp = tempfile::tempdir().unwrap();
    let mut db = Brittle::create(tmp.path()).unwrap();
    let reference = db.create_reference("nopdf2024", EntryType::Misc).unwrap();

    let err = db.get_pdf_path(reference.id).unwrap_err();
    let is_no_pdf = matches!(
        err,
        BrittleError::Validation(ValidationError::NoPdfAttached { .. })
    );
    assert!(is_no_pdf, "expected NoPdfAttached, got {err}");
}
/// After attaching a file, `get_pdf_path` must return an existing
/// on-disk path with a `.pdf` extension.
#[test]
fn get_pdf_path_returns_path_after_attach() {
    let tmp = tempfile::tempdir().unwrap();
    let mut db = Brittle::create(tmp.path()).unwrap();
    let reference = db.create_reference("withpdf2024", EntryType::Misc).unwrap();

    // Create a stand-in PDF on disk and attach it.
    let source = tmp.path().join("dummy.pdf");
    std::fs::write(&source, b"%PDF-1.4 dummy").unwrap();
    db.attach_pdf(reference.id, &source).unwrap();

    let path = db.get_pdf_path(reference.id).unwrap();
    assert!(path.exists(), "PDF path {path:?} should exist on disk");
    assert_eq!(path.extension().and_then(|e| e.to_str()), Some("pdf"));
}