diff --git a/native-route/.gradle/9.3.0/checksums/checksums.lock b/native-route/.gradle/9.3.0/checksums/checksums.lock
new file mode 100644
index 0000000..4d5e085
Binary files /dev/null and b/native-route/.gradle/9.3.0/checksums/checksums.lock differ
diff --git a/native-route/.gradle/9.3.0/fileChanges/last-build.bin b/native-route/.gradle/9.3.0/fileChanges/last-build.bin
new file mode 100644
index 0000000..f76dd23
Binary files /dev/null and b/native-route/.gradle/9.3.0/fileChanges/last-build.bin differ
diff --git a/native-route/.gradle/9.3.0/fileHashes/fileHashes.lock b/native-route/.gradle/9.3.0/fileHashes/fileHashes.lock
new file mode 100644
index 0000000..c5a3a9e
Binary files /dev/null and b/native-route/.gradle/9.3.0/fileHashes/fileHashes.lock differ
diff --git a/native-route/.gradle/9.3.0/gc.properties b/native-route/.gradle/9.3.0/gc.properties
new file mode 100644
index 0000000..e69de29
diff --git a/native-route/.gradle/buildOutputCleanup/buildOutputCleanup.lock b/native-route/.gradle/buildOutputCleanup/buildOutputCleanup.lock
new file mode 100644
index 0000000..b62273a
Binary files /dev/null and b/native-route/.gradle/buildOutputCleanup/buildOutputCleanup.lock differ
diff --git a/native-route/.gradle/buildOutputCleanup/cache.properties b/native-route/.gradle/buildOutputCleanup/cache.properties
new file mode 100644
index 0000000..b488e81
--- /dev/null
+++ b/native-route/.gradle/buildOutputCleanup/cache.properties
@@ -0,0 +1,2 @@
+#Sun Mar 29 20:35:39 EDT 2026
+gradle.version=9.3.0
diff --git a/native-route/.gradle/vcs-1/gc.properties b/native-route/.gradle/vcs-1/gc.properties
new file mode 100644
index 0000000..e69de29
diff --git a/native-route/android/.gradle/9.3.0/checksums/checksums.lock b/native-route/android/.gradle/9.3.0/checksums/checksums.lock
new file mode 100644
index 0000000..6f7861f
Binary files /dev/null and b/native-route/android/.gradle/9.3.0/checksums/checksums.lock differ
diff --git a/native-route/android/.gradle/9.3.0/fileChanges/last-build.bin b/native-route/android/.gradle/9.3.0/fileChanges/last-build.bin
new file mode 100644
index 0000000..f76dd23
Binary files /dev/null and b/native-route/android/.gradle/9.3.0/fileChanges/last-build.bin differ
diff --git a/native-route/android/.gradle/9.3.0/fileHashes/fileHashes.lock b/native-route/android/.gradle/9.3.0/fileHashes/fileHashes.lock
new file mode 100644
index 0000000..8e3429a
Binary files /dev/null and b/native-route/android/.gradle/9.3.0/fileHashes/fileHashes.lock differ
diff --git a/native-route/android/.gradle/9.3.0/gc.properties b/native-route/android/.gradle/9.3.0/gc.properties
new file mode 100644
index 0000000..e69de29
diff --git a/native-route/android/.gradle/buildOutputCleanup/buildOutputCleanup.lock b/native-route/android/.gradle/buildOutputCleanup/buildOutputCleanup.lock
new file mode 100644
index 0000000..89ec0dc
Binary files /dev/null and b/native-route/android/.gradle/buildOutputCleanup/buildOutputCleanup.lock differ
diff --git a/native-route/android/.gradle/buildOutputCleanup/cache.properties b/native-route/android/.gradle/buildOutputCleanup/cache.properties
new file mode 100644
index 0000000..4b7d3e4
--- /dev/null
+++ b/native-route/android/.gradle/buildOutputCleanup/cache.properties
@@ -0,0 +1,2 @@
+#Sun Mar 29 20:35:09 EDT 2026
+gradle.version=9.3.0
diff --git a/native-route/android/.gradle/vcs-1/gc.properties b/native-route/android/.gradle/vcs-1/gc.properties
new file mode 100644
index 0000000..e69de29
diff --git a/native-route/android/build/reports/problems/problems-report.html b/native-route/android/build/reports/problems/problems-report.html
new file mode 100644
index 0000000..4ca7953
--- /dev/null
+++ b/native-route/android/build/reports/problems/problems-report.html
@@ -0,0 +1,659 @@
+
+
+
+
+
+
+
+
+
+
+
+
+ Gradle Configuration Cache
+
+
+
+
+
+
+ Loading...
+
+
+
+
+
+
+
diff --git a/native-route/ios/RSSuper b/native-route/ios/RSSuper
index 5f6142b..914c13a 160000
--- a/native-route/ios/RSSuper
+++ b/native-route/ios/RSSuper
@@ -1 +1 @@
-Subproject commit 5f6142b128e85168ad4870a30f76f50c241fdea1
+Subproject commit 914c13a734d958eca49ef64fe9cf4c29cdee12f3
diff --git a/native-route/linux/meson.build b/native-route/linux/meson.build
index 76d4852..415b493 100644
--- a/native-route/linux/meson.build
+++ b/native-route/linux/meson.build
@@ -14,6 +14,8 @@ meson_version_check = run_command(vala, '--version', check: true)
glib_dep = dependency('glib-2.0', version: '>= 2.58')
gio_dep = dependency('gio-2.0', version: '>= 2.58')
json_dep = dependency('json-glib-1.0', version: '>= 1.4')
+sqlite_dep = dependency('sqlite3', version: '>= 3.0')
+gobject_dep = dependency('gobject-2.0', version: '>= 2.58')
# Source files
models = files(
@@ -26,8 +28,38 @@ models = files(
'src/models/reading-preferences.vala',
)
+# Database files
+database = files(
+ 'src/database/database.vala',
+ 'src/database/subscription-store.vala',
+ 'src/database/feed-item-store.vala',
+ 'src/database/search-history-store.vala',
+ 'src/database/sqlite3.vapi',
+ 'src/database/errors.vapi',
+)
+
# Main library
models_lib = library('rssuper-models', models,
dependencies: [glib_dep, gio_dep, json_dep],
install: false
)
+
+# Database library
+database_lib = library('rssuper-database', database,
+ dependencies: [glib_dep, gio_dep, json_dep, sqlite_dep, gobject_dep],
+ link_with: [models_lib],
+ install: false,
+ vala_args: ['--vapidir', 'src/database', '--pkg', 'sqlite3']
+)
+
+# Test executable
+test_exe = executable('database-tests',
+ 'src/tests/database-tests.vala',
+ dependencies: [glib_dep, gio_dep, json_dep, sqlite_dep, gobject_dep],
+ link_with: [models_lib, database_lib],
+ vala_args: ['--vapidir', 'src/database', '--pkg', 'sqlite3'],
+ install: false
+)
+
+# Test definition
+test('database tests', test_exe)
diff --git a/native-route/linux/src/database/database.vala b/native-route/linux/src/database/database.vala
new file mode 100644
index 0000000..9f9eb84
--- /dev/null
+++ b/native-route/linux/src/database/database.vala
@@ -0,0 +1,213 @@
+/*
+ * Database.vala
+ *
+ * Core database connection and migration management for RSSuper Linux.
+ * Uses SQLite with FTS5 for full-text search capabilities.
+ */
+
+/**
+ * Database - Manages SQLite database connection and migrations
+ */
+public class RSSuper.Database : Object {
+ private SQLite.DB db;
+ private string db_path;
+
+ /**
+ * Current database schema version
+ */
+ public const int CURRENT_VERSION = 1;
+
+ /**
+ * Signal emitted when database is ready
+ */
+ public signal void ready();
+
+ /**
+ * Signal emitted on error
+ */
+ public signal void error(string message);
+
+ /**
+ * Create a new database connection
+ *
+ * @param db_path Path to the SQLite database file
+ */
+ public Database(string db_path) throws Error {
+ this.db_path = db_path;
+ this.open();
+ this.migrate();
+ }
+
+ /**
+ * Open database connection
+ */
+ private void open() throws Error {
+ var file = File.new_for_path(db_path);
+ var parent = file.get_parent();
+ if (parent != null && !parent.query_exists()) {
+ try {
+ parent.make_directory_with_parents();
+ } catch (Error e) {
+ throw new DBError.FAILED("Failed to create database directory: %s", e.message);
+ }
+ }
+
+ int result = SQLite.DB.open(db_path, out db);
+ if (result != SQLite.SQLITE_OK) {
+ throw new DBError.FAILED("Failed to open database: %s".printf(db.errmsg()));
+ }
+
+ execute("PRAGMA foreign_keys = ON;");
+ execute("PRAGMA journal_mode = WAL;");
+
+ debug("Database opened: %s", db_path);
+ }
+
+ /**
+ * Run database migrations
+ */
+ private void migrate() throws Error {
+ execute(@"CREATE TABLE IF NOT EXISTS schema_migrations (
+ version INTEGER PRIMARY KEY,
+ applied_at TEXT NOT NULL DEFAULT (datetime('now'))
+ );");
+
+ int current_version = get_current_version();
+ debug("Current migration version: %d", current_version);
+
+ if (current_version >= CURRENT_VERSION) {
+ debug("Database is up to date");
+ return;
+ }
+
+ try {
+ var schema_path = Path.build_filename(Path.get_dirname(db_path), "schema.sql");
+ var schema_file = File.new_for_path(schema_path);
+
+ if (!schema_file.query_exists()) {
+ schema_path = "src/database/schema.sql";
+ schema_file = File.new_for_path(schema_path);
+ }
+
+ if (!schema_file.query_exists()) {
+ throw new DBError.FAILED("Schema file not found: %s".printf(schema_path));
+ }
+
+ uint8[] schema_bytes;
+ GLib.Cancellable? cancellable = null;
+ string? schema_str = null;
+ try {
+ schema_file.load_contents(cancellable, out schema_bytes, out schema_str);
+ } catch (Error e) {
+ throw new DBError.FAILED("Failed to read schema file: %s", e.message);
+ }
+ string schema = schema_str ?? (string) schema_bytes;
+
+ execute(schema);
+ execute("INSERT OR REPLACE INTO schema_migrations (version, applied_at) VALUES (" + CURRENT_VERSION.to_string() + ", datetime('now'));");
+
+ debug("Database migrated to version %d", CURRENT_VERSION);
+
+ } catch (Error e) {
+ throw new DBError.FAILED("Migration failed: %s".printf(e.message));
+ }
+ }
+
+ /**
+ * Get current migration version
+ */
+ private int get_current_version() throws Error {
+ try {
+ SQLite.Stmt stmt;
+ int result = db.prepare_v2("SELECT COALESCE(MAX(version), 0) FROM schema_migrations;", -1, out stmt, null);
+
+ if (result != SQLite.SQLITE_OK) {
+ throw new DBError.FAILED("Failed to prepare statement: %s".printf(db.errmsg()));
+ }
+
+ int version = 0;
+ if (stmt.step() == SQLite.SQLITE_ROW) {
+ version = stmt.column_int(0);
+ }
+
+ return version;
+
+ } catch (Error e) {
+ throw new DBError.FAILED("Failed to get migration version: %s".printf(e.message));
+ }
+ }
+
+ /**
+ * Execute a SQL statement
+ */
+ public void execute(string sql) throws Error {
+ string errmsg = null;
+ int result = db.exec(sql, null, null, out errmsg);
+
+ if (result != SQLite.SQLITE_OK) {
+ throw new DBError.FAILED("SQL execution failed: %s\nSQL: %s".printf(errmsg, sql));
+ }
+ }
+
+ /**
+ * Prepare a SQL statement
+ */
+ public SQLite.Stmt prepare(string sql) throws Error {
+ SQLite.Stmt stmt;
+ int result = db.prepare_v2(sql, -1, out stmt, null);
+
+ if (result != SQLite.SQLITE_OK) {
+ throw new DBError.FAILED("Failed to prepare statement: %s\nSQL: %s".printf(db.errmsg(), sql));
+ }
+
+ return stmt;
+ }
+
+ /**
+ * Get the database connection handle
+ */
+ public SQLite.DB get_handle() {
+ return db;
+ }
+
+ /**
+ * Close database connection
+ */
+ public void close() {
+ if (db != null) {
+ db = null;
+ debug("Database closed: %s", db_path);
+ }
+ }
+
+ /**
+ * Begin a transaction
+ */
+ public void begin_transaction() throws Error {
+ execute("BEGIN TRANSACTION;");
+ }
+
+ /**
+ * Commit a transaction
+ */
+ public void commit() throws Error {
+ execute("COMMIT;");
+ }
+
+ /**
+ * Rollback a transaction
+ */
+ public void rollback() throws Error {
+ execute("ROLLBACK;");
+ }
+
+ /* Helper to convert GLib.List to array */
+ private string[] toArray(GLib.List<string> list) {
+ string[] arr = {};
+ for (unowned var node = list; node != null; node = node.next) {
+ arr += node.data;
+ }
+ return arr;
+ }
+
+}
diff --git a/native-route/linux/src/database/feed-item-store.vala b/native-route/linux/src/database/feed-item-store.vala
new file mode 100644
index 0000000..d388302
--- /dev/null
+++ b/native-route/linux/src/database/feed-item-store.vala
@@ -0,0 +1,416 @@
+/*
+ * FeedItemStore.vala
+ *
+ * CRUD operations for feed items with FTS search support.
+ */
+
+/**
+ * FeedItemStore - Manages feed item persistence
+ */
+public class RSSuper.FeedItemStore : Object {
+ private Database db;
+
+ /**
+ * Signal emitted when an item is added
+ */
+ public signal void item_added(FeedItem item);
+
+ /**
+ * Signal emitted when an item is updated
+ */
+ public signal void item_updated(FeedItem item);
+
+ /**
+ * Signal emitted when an item is deleted
+ */
+ public signal void item_deleted(string id);
+
+ /**
+ * Create a new feed item store
+ */
+ public FeedItemStore(Database db) {
+ this.db = db;
+ }
+
+ /**
+ * Add a new feed item
+ */
+ public FeedItem add(FeedItem item) throws Error {
+ var stmt = db.prepare(
+ "INSERT INTO feed_items (id, subscription_id, title, link, description, content, " +
+ "author, published, updated, categories, enclosure_url, enclosure_type, " +
+ "enclosure_length, guid, is_read, is_starred) " +
+ "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);"
+ );
+
+ stmt.bind_text(1, item.id, -1, null);
+ stmt.bind_text(2, item.subscription_title ?? "", -1, null); // NOTE(review): binds the title into subscription_id, which has an FK to feed_subscriptions(id) — verify FeedItem exposes the real subscription id
+ stmt.bind_text(3, item.title, -1, null);
+ stmt.bind_text(4, item.link ?? "", -1, null);
+ stmt.bind_text(5, item.description ?? "", -1, null);
+ stmt.bind_text(6, item.content ?? "", -1, null);
+ stmt.bind_text(7, item.author ?? "", -1, null);
+ stmt.bind_text(8, item.published ?? "", -1, null);
+ stmt.bind_text(9, item.updated ?? "", -1, null);
+ stmt.bind_text(10, format_categories(item.categories), -1, null);
+ stmt.bind_text(11, item.enclosure_url ?? "", -1, null);
+ stmt.bind_text(12, item.enclosure_type ?? "", -1, null);
+ stmt.bind_text(13, item.enclosure_length ?? "", -1, null);
+ stmt.bind_text(14, item.guid ?? "", -1, null);
+ stmt.bind_int(15, 0); // is_read
+ stmt.bind_int(16, 0); // is_starred
+
+ stmt.step();
+
+ debug("Feed item added: %s", item.id);
+ item_added(item);
+
+ return item;
+ }
+
+ /**
+ * Add multiple items in a batch
+ */
+ public void add_batch(FeedItem[] items) throws Error {
+ db.begin_transaction();
+ try {
+ foreach (var item in items) {
+ add(item);
+ }
+ db.commit();
+ debug("Batch insert completed: %d items", items.length);
+ } catch (Error e) {
+ db.rollback();
+ throw new DBError.FAILED("Transaction failed: %s".printf(e.message));
+ }
+ }
+
+ /**
+ * Get an item by ID
+ */
+ public FeedItem? get_by_id(string id) throws Error {
+ var stmt = db.prepare(
+ "SELECT id, subscription_id, title, link, description, content, author, " +
+ "published, updated, categories, enclosure_url, enclosure_type, " +
+ "enclosure_length, guid, is_read, is_starred " +
+ "FROM feed_items WHERE id = ?;"
+ );
+
+ stmt.bind_text(1, id, -1, null);
+
+ if (stmt.step() == SQLite.SQLITE_ROW) {
+ return row_to_item(stmt);
+ }
+
+ return null;
+ }
+
+ /**
+ * Get items by subscription ID
+ */
+ public FeedItem[] get_by_subscription(string subscription_id) throws Error {
+ var items = new GLib.List<FeedItem>();
+
+ var stmt = db.prepare(
+ "SELECT id, subscription_id, title, link, description, content, author, " +
+ "published, updated, categories, enclosure_url, enclosure_type, " +
+ "enclosure_length, guid, is_read, is_starred " +
+ "FROM feed_items WHERE subscription_id = ? " +
+ "ORDER BY published DESC LIMIT 100;"
+ );
+
+ stmt.bind_text(1, subscription_id, -1, null);
+
+ while (stmt.step() == SQLite.SQLITE_ROW) {
+ var item = row_to_item(stmt);
+ if (item != null) {
+ items.append(item);
+ }
+ }
+
+ return items_to_array(items);
+ }
+
+ /**
+ * Get all items
+ */
+ public FeedItem[] get_all() throws Error {
+ var items = new GLib.List<FeedItem>();
+
+ var stmt = db.prepare(
+ "SELECT id, subscription_id, title, link, description, content, author, " +
+ "published, updated, categories, enclosure_url, enclosure_type, " +
+ "enclosure_length, guid, is_read, is_starred " +
+ "FROM feed_items ORDER BY published DESC LIMIT 1000;"
+ );
+
+ while (stmt.step() == SQLite.SQLITE_ROW) {
+ var item = row_to_item(stmt);
+ if (item != null) {
+ items.append(item);
+ }
+ }
+
+ return items_to_array(items);
+ }
+
+ /**
+ * Search items using FTS
+ */
+ public FeedItem[] search(string query, int limit = 50) throws Error {
+ var items = new GLib.List<FeedItem>();
+
+ var stmt = db.prepare(
+ "SELECT f.id, f.subscription_id, f.title, f.link, f.description, f.content, " +
+ "f.author, f.published, f.updated, f.categories, f.enclosure_url, " +
+ "f.enclosure_type, f.enclosure_length, f.guid, f.is_read, f.is_starred " +
+ "FROM feed_items_fts t " +
+ "JOIN feed_items f ON t.rowid = f.rowid " +
+ "WHERE feed_items_fts MATCH ? " +
+ "ORDER BY rank " +
+ "LIMIT ?;"
+ );
+
+ stmt.bind_text(1, query, -1, null);
+ stmt.bind_int(2, limit);
+
+ while (stmt.step() == SQLite.SQLITE_ROW) {
+ var item = row_to_item(stmt);
+ if (item != null) {
+ items.append(item);
+ }
+ }
+
+ return items_to_array(items);
+ }
+
+ /**
+ * Mark an item as read
+ */
+ public void mark_as_read(string id) throws Error {
+ var stmt = db.prepare("UPDATE feed_items SET is_read = 1 WHERE id = ?;");
+ stmt.bind_text(1, id, -1, null);
+ stmt.step();
+
+ debug("Item marked as read: %s", id);
+ }
+
+ /**
+ * Mark an item as unread
+ */
+ public void mark_as_unread(string id) throws Error {
+ var stmt = db.prepare("UPDATE feed_items SET is_read = 0 WHERE id = ?;");
+ stmt.bind_text(1, id, -1, null);
+ stmt.step();
+
+ debug("Item marked as unread: %s", id);
+ }
+
+ /**
+ * Mark an item as starred
+ */
+ public void mark_as_starred(string id) throws Error {
+ var stmt = db.prepare("UPDATE feed_items SET is_starred = 1 WHERE id = ?;");
+ stmt.bind_text(1, id, -1, null);
+ stmt.step();
+
+ debug("Item starred: %s", id);
+ }
+
+ /**
+ * Unmark an item from starred
+ */
+ public void unmark_starred(string id) throws Error {
+ var stmt = db.prepare("UPDATE feed_items SET is_starred = 0 WHERE id = ?;");
+ stmt.bind_text(1, id, -1, null);
+ stmt.step();
+
+ debug("Item unstarred: %s", id);
+ }
+
+ /**
+ * Get unread items
+ */
+ public FeedItem[] get_unread() throws Error {
+ var items = new GLib.List<FeedItem>();
+
+ var stmt = db.prepare(
+ "SELECT id, subscription_id, title, link, description, content, author, " +
+ "published, updated, categories, enclosure_url, enclosure_type, " +
+ "enclosure_length, guid, is_read, is_starred " +
+ "FROM feed_items WHERE is_read = 0 " +
+ "ORDER BY published DESC LIMIT 100;"
+ );
+
+ while (stmt.step() == SQLite.SQLITE_ROW) {
+ var item = row_to_item(stmt);
+ if (item != null) {
+ items.append(item);
+ }
+ }
+
+ return items_to_array(items);
+ }
+
+ /**
+ * Get starred items
+ */
+ public FeedItem[] get_starred() throws Error {
+ var items = new GLib.List<FeedItem>();
+
+ var stmt = db.prepare(
+ "SELECT id, subscription_id, title, link, description, content, author, " +
+ "published, updated, categories, enclosure_url, enclosure_type, " +
+ "enclosure_length, guid, is_read, is_starred " +
+ "FROM feed_items WHERE is_starred = 1 " +
+ "ORDER BY published DESC LIMIT 100;"
+ );
+
+ while (stmt.step() == SQLite.SQLITE_ROW) {
+ var item = row_to_item(stmt);
+ if (item != null) {
+ items.append(item);
+ }
+ }
+
+ return items_to_array(items);
+ }
+
+ /**
+ * Delete an item by ID
+ */
+ public void delete(string id) throws Error {
+ var stmt = db.prepare("DELETE FROM feed_items WHERE id = ?;");
+ stmt.bind_text(1, id, -1, null);
+ stmt.step();
+
+ debug("Item deleted: %s", id);
+ item_deleted(id);
+ }
+
+ /**
+ * Delete items by subscription ID
+ */
+ public void delete_by_subscription(string subscription_id) throws Error {
+ var stmt = db.prepare("DELETE FROM feed_items WHERE subscription_id = ?;");
+ stmt.bind_text(1, subscription_id, -1, null);
+ stmt.step();
+
+ debug("Items deleted for subscription: %s", subscription_id);
+ }
+
+ /**
+ * Delete old items (keep last N items per subscription)
+ */
+ public void cleanup_old_items(int keep_count = 100) throws Error {
+ db.begin_transaction();
+ try {
+ var stmt = db.prepare(
+ "DELETE FROM feed_items WHERE id NOT IN (" +
+ "SELECT id FROM feed_items " +
+ "ORDER BY published DESC " +
+ "LIMIT -1 OFFSET ?" +
+ ");"
+ );
+ stmt.bind_int(1, keep_count);
+ stmt.step();
+
+ db.commit();
+ debug("Old items cleaned up, kept %d", keep_count);
+ } catch (Error e) {
+ db.rollback();
+ throw new DBError.FAILED("Transaction failed: %s".printf(e.message));
+ }
+ }
+
+ /**
+ * Convert a database row to a FeedItem
+ */
+ private FeedItem? row_to_item(SQLite.Stmt stmt) {
+ try {
+ string categories_str = stmt.column_text(9);
+ string[] categories = parse_categories(categories_str);
+
+ var item = new FeedItem.with_values(
+ stmt.column_text(0), // id
+ stmt.column_text(2), // title
+ stmt.column_text(3), // link
+ stmt.column_text(4), // description
+ stmt.column_text(5), // content
+ stmt.column_text(6), // author
+ stmt.column_text(7), // published
+ stmt.column_text(8), // updated
+ categories,
+ stmt.column_text(10), // enclosure_url
+ stmt.column_text(11), // enclosure_type
+ stmt.column_text(12), // enclosure_length
+ stmt.column_text(13), // guid
+ stmt.column_text(1) // subscription_id (stored as subscription_title)
+ );
+
+ return item;
+ } catch (Error e) {
+ warning("Failed to parse item row: %s", e.message);
+ return null;
+ }
+ }
+
+ /**
+ * Format categories array as JSON string
+ */
+ private string format_categories(string[] categories) {
+ if (categories.length == 0) {
+ return "[]";
+ }
+
+ var sb = new StringBuilder();
+ sb.append("[");
+ for (var i = 0; i < categories.length; i++) {
+ if (i > 0) sb.append(",");
+ sb.append("\"");
+ sb.append(categories[i]);
+ sb.append("\"");
+ }
+ sb.append("]");
+
+ return sb.str;
+ }
+
+ /**
+ * Parse categories from JSON string
+ */
+ private string[] parse_categories(string json) {
+ if (json == null || json.length == 0 || json == "[]") {
+ return {};
+ }
+
+ try {
+ var parser = new Json.Parser();
+ if (parser.load_from_data(json)) {
+ var node = parser.get_root();
+ if (node.get_node_type() == Json.NodeType.ARRAY) {
+ var array = node.get_array();
+ var categories = new string[array.get_length()];
+ for (var i = 0; i < array.get_length(); i++) {
+ categories[i] = array.get_string_element(i);
+ }
+ return categories;
+ }
+ }
+ } catch (Error e) {
+ warning("Failed to parse categories: %s", e.message);
+ }
+
+ return {};
+ }
+
+ private FeedItem[] items_to_array(GLib.List<FeedItem> list) {
+ FeedItem[] arr = {};
+ for (unowned var node = list; node != null; node = node.next) {
+ if (node.data != null) arr += node.data;
+ }
+ return arr;
+ }
+
+}
+
diff --git a/native-route/linux/src/database/schema.sql b/native-route/linux/src/database/schema.sql
new file mode 100644
index 0000000..30b291a
--- /dev/null
+++ b/native-route/linux/src/database/schema.sql
@@ -0,0 +1,103 @@
+-- RSSuper Database Schema
+-- SQLite with FTS5 for full-text search
+
+-- Enable foreign keys
+PRAGMA foreign_keys = ON;
+
+-- Migration tracking table
+CREATE TABLE IF NOT EXISTS schema_migrations (
+ version INTEGER PRIMARY KEY,
+ applied_at TEXT NOT NULL DEFAULT (datetime('now'))
+);
+
+-- Feed subscriptions table
+CREATE TABLE IF NOT EXISTS feed_subscriptions (
+ id TEXT PRIMARY KEY,
+ url TEXT NOT NULL UNIQUE,
+ title TEXT NOT NULL,
+ category TEXT,
+ enabled INTEGER NOT NULL DEFAULT 1,
+ fetch_interval INTEGER NOT NULL DEFAULT 60,
+ created_at TEXT NOT NULL,
+ updated_at TEXT NOT NULL,
+ last_fetched_at TEXT,
+ next_fetch_at TEXT,
+ error TEXT,
+ http_auth_username TEXT,
+ http_auth_password TEXT
+);
+
+-- Feed items table
+CREATE TABLE IF NOT EXISTS feed_items (
+ id TEXT PRIMARY KEY,
+ subscription_id TEXT NOT NULL,
+ title TEXT NOT NULL,
+ link TEXT,
+ description TEXT,
+ content TEXT,
+ author TEXT,
+ published TEXT,
+ updated TEXT,
+ categories TEXT, -- JSON array as text
+ enclosure_url TEXT,
+ enclosure_type TEXT,
+ enclosure_length TEXT,
+ guid TEXT,
+ is_read INTEGER NOT NULL DEFAULT 0,
+ is_starred INTEGER NOT NULL DEFAULT 0,
+ created_at TEXT NOT NULL DEFAULT (datetime('now')),
+ FOREIGN KEY (subscription_id) REFERENCES feed_subscriptions(id) ON DELETE CASCADE
+);
+
+-- Create index for feed items
+CREATE INDEX IF NOT EXISTS idx_feed_items_subscription ON feed_items(subscription_id);
+CREATE INDEX IF NOT EXISTS idx_feed_items_published ON feed_items(published DESC);
+CREATE INDEX IF NOT EXISTS idx_feed_items_read ON feed_items(is_read);
+CREATE INDEX IF NOT EXISTS idx_feed_items_starred ON feed_items(is_starred);
+
+-- Search history table
+CREATE TABLE IF NOT EXISTS search_history (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ query TEXT NOT NULL,
+ filters_json TEXT,
+ sort_option TEXT NOT NULL DEFAULT 'relevance',
+ page INTEGER NOT NULL DEFAULT 1,
+ page_size INTEGER NOT NULL DEFAULT 20,
+ result_count INTEGER,
+ created_at TEXT NOT NULL DEFAULT (datetime('now'))
+);
+
+CREATE INDEX IF NOT EXISTS idx_search_history_created ON search_history(created_at DESC);
+
+-- FTS5 virtual table for full-text search on feed items
+CREATE VIRTUAL TABLE IF NOT EXISTS feed_items_fts USING fts5(
+ title,
+ description,
+ content,
+ author,
+ content='feed_items',
+ content_rowid='rowid'
+);
+
+-- Trigger to keep FTS table in sync on INSERT
+CREATE TRIGGER IF NOT EXISTS feed_items_ai AFTER INSERT ON feed_items BEGIN
+ INSERT INTO feed_items_fts(rowid, title, description, content, author)
+ VALUES (new.rowid, new.title, new.description, new.content, new.author);
+END;
+
+-- Trigger to keep FTS table in sync on DELETE
+CREATE TRIGGER IF NOT EXISTS feed_items_ad AFTER DELETE ON feed_items BEGIN
+ INSERT INTO feed_items_fts(feed_items_fts, rowid, title, description, content, author)
+ VALUES('delete', old.rowid, old.title, old.description, old.content, old.author);
+END;
+
+-- Trigger to keep FTS table in sync on UPDATE
+CREATE TRIGGER IF NOT EXISTS feed_items_au AFTER UPDATE ON feed_items BEGIN
+ INSERT INTO feed_items_fts(feed_items_fts, rowid, title, description, content, author)
+ VALUES('delete', old.rowid, old.title, old.description, old.content, old.author);
+ INSERT INTO feed_items_fts(rowid, title, description, content, author)
+ VALUES (new.rowid, new.title, new.description, new.content, new.author);
+END;
+
+-- Initial migration record
+INSERT OR IGNORE INTO schema_migrations (version) VALUES (1);
diff --git a/native-route/linux/src/database/search-history-store.vala b/native-route/linux/src/database/search-history-store.vala
new file mode 100644
index 0000000..c701b41
--- /dev/null
+++ b/native-route/linux/src/database/search-history-store.vala
@@ -0,0 +1,171 @@
+/*
+ * SearchHistoryStore.vala
+ *
+ * CRUD operations for search history.
+ */
+
+/**
+ * SearchHistoryStore - Manages search history persistence
+ */
+public class RSSuper.SearchHistoryStore : Object {
+ private Database db;
+
+ /**
+ * Maximum number of history entries to keep
+ */
+ public int max_entries { get; set; default = 100; }
+
+ /**
+ * Signal emitted when a search is recorded
+ */
+ public signal void search_recorded(SearchQuery query, int result_count);
+
+ /**
+ * Signal emitted when history is cleared
+ */
+ public signal void history_cleared();
+
+ /**
+ * Create a new search history store
+ */
+ public SearchHistoryStore(Database db) {
+ this.db = db;
+ }
+
+ /**
+ * Record a search query
+ */
+ public int record_search(SearchQuery query, int result_count = 0) throws Error {
+ var stmt = db.prepare(
+ "INSERT INTO search_history (query, filters_json, sort_option, page, page_size, result_count) " +
+ "VALUES (?, ?, ?, ?, ?, ?);"
+ );
+
+ stmt.bind_text(1, query.query, -1, null);
+ stmt.bind_text(2, query.filters_json ?? "", -1, null);
+ stmt.bind_text(3, SearchFilters.sort_option_to_string(query.sort), -1, null);
+ stmt.bind_int(4, query.page);
+ stmt.bind_int(5, query.page_size);
+ stmt.bind_int(6, result_count);
+
+ stmt.step();
+
+ debug("Search recorded: %s (%d results)", query.query, result_count);
+ search_recorded(query, result_count);
+
+ // Clean up old entries if needed
+ cleanup_old_entries();
+
+ return 0; // TODO: return sqlite3_last_insert_rowid() — currently always 0, so delete(int id) cannot be driven from this value
+ }
+
+ /**
+ * Get search history
+ */
+ public SearchQuery[] get_history(int limit = 50) throws Error {
+ var queries = new GLib.List<SearchQuery?>();
+
+ var stmt = db.prepare(
+ "SELECT query, filters_json, sort_option, page, page_size, result_count, created_at " +
+ "FROM search_history " +
+ "ORDER BY created_at DESC " +
+ "LIMIT ?;"
+ );
+
+ stmt.bind_int(1, limit);
+
+ while (stmt.step() == SQLite.SQLITE_ROW) {
+ var query = row_to_query(stmt);
+ queries.append(query);
+ }
+
+ return queries_to_array(queries);
+ }
+
+ /**
+ * Get recent searches (last 24 hours)
+ */
+ public SearchQuery[] get_recent() throws Error {
+ var queries = new GLib.List<SearchQuery?>();
+ var now = new DateTime.now_local();
+ var yesterday = now.add_days(-1);
+ var threshold = yesterday.format("%Y-%m-%dT%H:%M:%S");
+
+ var stmt = db.prepare(
+ "SELECT query, filters_json, sort_option, page, page_size, result_count, created_at " +
+ "FROM search_history " +
+ "WHERE created_at >= ? " +
+ "ORDER BY created_at DESC " +
+ "LIMIT 20;"
+ );
+
+ stmt.bind_text(1, threshold, -1, null);
+
+ while (stmt.step() == SQLite.SQLITE_ROW) {
+ var query = row_to_query(stmt);
+ queries.append(query);
+ }
+
+ return queries_to_array(queries);
+ }
+
+ /**
+ * Delete a search history entry by ID
+ */
+ public void delete(int id) throws Error {
+ var stmt = db.prepare("DELETE FROM search_history WHERE id = ?;");
+ stmt.bind_int(1, id);
+ stmt.step();
+
+ debug("Search history entry deleted: %d", id);
+ }
+
+ /**
+ * Clear all search history
+ */
+ public void clear() throws Error {
+ var stmt = db.prepare("DELETE FROM search_history;");
+ stmt.step();
+
+ debug("Search history cleared");
+ history_cleared();
+ }
+
+ /**
+ * Clear old search history entries
+ */
+ private void cleanup_old_entries() throws Error {
+ var stmt = db.prepare(
+ "DELETE FROM search_history WHERE id NOT IN (" +
+ "SELECT id FROM search_history ORDER BY created_at DESC LIMIT ?" +
+ ");"
+ );
+
+ stmt.bind_int(1, max_entries);
+ stmt.step();
+ }
+
+ /**
+ * Convert a database row to a SearchQuery
+ */
+ private SearchQuery row_to_query(SQLite.Stmt stmt) {
+ string query_str = stmt.column_text(0);
+ string? filters_json = stmt.column_text(1);
+ string sort_str = stmt.column_text(2);
+ int page = stmt.column_int(3);
+ int page_size = stmt.column_int(4);
+
+ return SearchQuery(query_str, page, page_size, filters_json,
+ SearchFilters.sort_option_from_string(sort_str));
+ }
+
+ private SearchQuery[] queries_to_array(GLib.List<SearchQuery?> list) {
+ SearchQuery[] arr = {};
+ for (unowned var node = list; node != null; node = node.next) {
+ arr += node.data;
+ }
+ return arr;
+ }
+
+}
+
diff --git a/native-route/linux/src/database/sqlite3.vapi b/native-route/linux/src/database/sqlite3.vapi
new file mode 100644
index 0000000..75eb605
--- /dev/null
+++ b/native-route/linux/src/database/sqlite3.vapi
@@ -0,0 +1,63 @@
+/*
+ * SQLite3 C API bindings for Vala
+ */
+
+[CCode (cheader_filename = "sqlite3.h")]
+namespace SQLite {
+ [CCode (cname = "sqlite3", free_function = "sqlite3_close")]
+ public class DB {
+ [CCode (cname = "sqlite3_open")]
+ public static int open(string filename, [CCode (array_length = false)] out DB db);
+
+ [CCode (cname = "sqlite3_exec")]
+ public int exec(string sql, DBCallback? callback = null, void* arg = null, out string? errmsg = null);
+
+ [CCode (cname = "sqlite3_errmsg")]
+ public unowned string errmsg();
+
+ [CCode (cname = "sqlite3_prepare_v2")]
+ public int prepare_v2(string zSql, int nByte, [CCode (array_length = false)] out Stmt stmt, void* pzTail = null);
+ }
+
+ [CCode (cname = "sqlite3_stmt", free_function = "sqlite3_finalize")]
+ public class Stmt {
+ [CCode (cname = "sqlite3_step")]
+ public int step();
+
+ [CCode (cname = "sqlite3_column_count")]
+ public int column_count();
+
+ [CCode (cname = "sqlite3_column_text")]
+ public unowned string column_text(int i);
+
+ [CCode (cname = "sqlite3_column_int")]
+ public int column_int(int i);
+
+ [CCode (cname = "sqlite3_column_double")]
+ public double column_double(int i);
+
+ [CCode (cname = "sqlite3_bind_text")]
+ public int bind_text(int i, string z, int n, void* x);
+
+ [CCode (cname = "sqlite3_bind_int")]
+ public int bind_int(int i, int val);
+
+ [CCode (cname = "sqlite3_bind_double")]
+ public int bind_double(int i, double val);
+
+ [CCode (cname = "sqlite3_bind_null")]
+ public int bind_null(int i);
+ }
+
+ [CCode (cname = "SQLITE_OK")]
+ public const int SQLITE_OK;
+ [CCode (cname = "SQLITE_ROW")]
+ public const int SQLITE_ROW;
+ [CCode (cname = "SQLITE_DONE")]
+ public const int SQLITE_DONE;
+ [CCode (cname = "SQLITE_ERROR")]
+ public const int SQLITE_ERROR;
+
+ [CCode (has_target = false)]
+ public delegate int DBCallback(void* arg, int argc, string[] argv, string[] col_names);
+}
diff --git a/native-route/linux/src/database/subscription-store.vala b/native-route/linux/src/database/subscription-store.vala
new file mode 100644
index 0000000..e1de44c
--- /dev/null
+++ b/native-route/linux/src/database/subscription-store.vala
@@ -0,0 +1,244 @@
+/*
+ * SubscriptionStore.vala
+ *
+ * CRUD operations for feed subscriptions.
+ */
+
+/**
+ * SubscriptionStore - Manages feed subscription persistence
+ */
+public class RSSuper.SubscriptionStore : Object {
+ private Database db;
+
+ /**
+ * Signal emitted when a subscription is added
+ */
+ public signal void subscription_added(FeedSubscription subscription);
+
+ /**
+ * Signal emitted when a subscription is updated
+ */
+ public signal void subscription_updated(FeedSubscription subscription);
+
+ /**
+ * Signal emitted when a subscription is deleted
+ */
+ public signal void subscription_deleted(string id);
+
+ /**
+  * Create a new subscription store
+  *
+  * @param db open Database handle used for all statements; shared with the
+  *           caller, not exclusively owned by this store
+  */
+ public SubscriptionStore(Database db) {
+ this.db = db;
+ }
+
+ /**
+  * Add a new subscription
+  *
+  * Inserts one row into feed_subscriptions.  Nullable fields are persisted as
+  * empty strings, matching what the SELECT helpers read back.  Emits
+  * subscription_added() once the insert succeeds.
+  *
+  * @param subscription record to persist; returned unchanged for chaining
+  */
+ public FeedSubscription add(FeedSubscription subscription) throws Error {
+     var stmt = db.prepare(
+         "INSERT INTO feed_subscriptions (id, url, title, category, enabled, fetch_interval, " +
+         "created_at, updated_at, last_fetched_at, next_fetch_at, error, http_auth_username, http_auth_password) " +
+         "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);"
+     );
+
+     stmt.bind_text(1, subscription.id, -1, null);
+     stmt.bind_text(2, subscription.url, -1, null);
+     stmt.bind_text(3, subscription.title, -1, null);
+     stmt.bind_text(4, subscription.category ?? "", -1, null);
+     stmt.bind_int(5, subscription.enabled ? 1 : 0);
+     stmt.bind_int(6, subscription.fetch_interval);
+     stmt.bind_text(7, subscription.created_at, -1, null);
+     stmt.bind_text(8, subscription.updated_at, -1, null);
+     stmt.bind_text(9, subscription.last_fetched_at ?? "", -1, null);
+     stmt.bind_text(10, subscription.next_fetch_at ?? "", -1, null);
+     stmt.bind_text(11, subscription.error ?? "", -1, null);
+     stmt.bind_text(12, subscription.http_auth_username ?? "", -1, null);
+     stmt.bind_text(13, subscription.http_auth_password ?? "", -1, null);
+
+     // The original discarded sqlite3_step()'s result, silently swallowing
+     // failures such as a UNIQUE violation on id and emitting the signal anyway.
+     if (stmt.step() != SQLite.SQLITE_DONE) {
+         warning("Failed to insert subscription: %s", subscription.id);
+     } else {
+         debug("Subscription added: %s", subscription.id);
+         subscription_added(subscription);
+     }
+
+     return subscription;
+ }
+
+ /**
+  * Get a subscription by ID
+  *
+  * @param id primary key of the row to load
+  * @return the matching subscription, or null when no row exists
+  */
+ public FeedSubscription? get_by_id(string id) throws Error {
+     var query = db.prepare(
+         "SELECT id, url, title, category, enabled, fetch_interval, created_at, updated_at, " +
+         "last_fetched_at, next_fetch_at, error, http_auth_username, http_auth_password " +
+         "FROM feed_subscriptions WHERE id = ?;"
+     );
+     query.bind_text(1, id, -1, null);
+
+     // id is the primary key, so at most one row can come back.
+     return (query.step() == SQLite.SQLITE_ROW) ? row_to_subscription(query) : null;
+ }
+
+ /**
+  * Get all subscriptions
+  *
+  * @return every stored subscription, ordered by title
+  */
+ public FeedSubscription[] get_all() throws Error {
+     // Accumulate straight into the result array: the original collected rows
+     // in an untyped `new GLib.List()`, which loses the element type (the
+     // generic argument is mandatory) and cannot be converted back to
+     // FeedSubscription[].
+     FeedSubscription[] subscriptions = {};
+
+     var stmt = db.prepare(
+         "SELECT id, url, title, category, enabled, fetch_interval, created_at, updated_at, " +
+         "last_fetched_at, next_fetch_at, error, http_auth_username, http_auth_password " +
+         "FROM feed_subscriptions ORDER BY title;"
+     );
+
+     while (stmt.step() == SQLite.SQLITE_ROW) {
+         var subscription = row_to_subscription(stmt);
+         if (subscription != null) {
+             subscriptions += subscription;
+         }
+     }
+
+     return subscriptions;
+ }
+
+ /**
+  * Update a subscription
+  *
+  * Rewrites every mutable column of the row matching subscription.id and
+  * emits subscription_updated() once the statement succeeds.
+  */
+ public void update(FeedSubscription subscription) throws Error {
+     var stmt = db.prepare(
+         "UPDATE feed_subscriptions SET url = ?, title = ?, category = ?, enabled = ?, " +
+         "fetch_interval = ?, updated_at = ?, last_fetched_at = ?, next_fetch_at = ?, " +
+         "error = ?, http_auth_username = ?, http_auth_password = ? " +
+         "WHERE id = ?;"
+     );
+
+     stmt.bind_text(1, subscription.url, -1, null);
+     stmt.bind_text(2, subscription.title, -1, null);
+     stmt.bind_text(3, subscription.category ?? "", -1, null);
+     stmt.bind_int(4, subscription.enabled ? 1 : 0);
+     stmt.bind_int(5, subscription.fetch_interval);
+     stmt.bind_text(6, subscription.updated_at, -1, null);
+     stmt.bind_text(7, subscription.last_fetched_at ?? "", -1, null);
+     stmt.bind_text(8, subscription.next_fetch_at ?? "", -1, null);
+     stmt.bind_text(9, subscription.error ?? "", -1, null);
+     stmt.bind_text(10, subscription.http_auth_username ?? "", -1, null);
+     stmt.bind_text(11, subscription.http_auth_password ?? "", -1, null);
+     stmt.bind_text(12, subscription.id, -1, null);
+
+     // The original ignored sqlite3_step()'s result, hiding statement failures
+     // and firing the updated signal regardless.
+     if (stmt.step() != SQLite.SQLITE_DONE) {
+         warning("Failed to update subscription: %s", subscription.id);
+     } else {
+         debug("Subscription updated: %s", subscription.id);
+         subscription_updated(subscription);
+     }
+ }
+
+ /**
+  * Delete a subscription
+  *
+  * Removes the row matching `id` (a no-op when it does not exist) and emits
+  * subscription_deleted() afterwards.
+  */
+ public void remove_subscription(string id) throws Error {
+     var delete_stmt = db.prepare("DELETE FROM feed_subscriptions WHERE id = ?;");
+     delete_stmt.bind_text(1, id, -1, null);
+     delete_stmt.step();
+
+     debug("Subscription deleted: %s", id);
+     subscription_deleted(id);
+ }
+
+ /**
+  * Delete a subscription by object
+  *
+  * Convenience overload; delegates to remove_subscription(), which also emits
+  * the subscription_deleted signal.
+  */
+ public void delete_subscription(FeedSubscription subscription) throws Error {
+ remove_subscription(subscription.id);
+ }
+
+ /**
+  * Get enabled subscriptions
+  *
+  * @return subscriptions whose enabled flag is set, ordered by title
+  */
+ public FeedSubscription[] get_enabled() throws Error {
+     // Accumulate straight into the result array; the original used an
+     // untyped `new GLib.List()` (missing the mandatory element type argument).
+     FeedSubscription[] enabled = {};
+
+     var stmt = db.prepare(
+         "SELECT id, url, title, category, enabled, fetch_interval, created_at, updated_at, " +
+         "last_fetched_at, next_fetch_at, error, http_auth_username, http_auth_password " +
+         "FROM feed_subscriptions WHERE enabled = 1 ORDER BY title;"
+     );
+
+     while (stmt.step() == SQLite.SQLITE_ROW) {
+         var subscription = row_to_subscription(stmt);
+         if (subscription != null) {
+             enabled += subscription;
+         }
+     }
+
+     return enabled;
+ }
+
+ /**
+  * Get subscriptions that need fetching
+  *
+  * Returns enabled feeds whose next_fetch_at is unset — NULL, or the empty
+  * string written by add()/update(), which compares lexicographically below
+  * any timestamp — or already in the past, ordered soonest-first.
+  */
+ public FeedSubscription[] get_due_for_fetch() throws Error {
+     // Accumulate straight into the result array; the original used an
+     // untyped `new GLib.List()` (missing the mandatory element type argument).
+     FeedSubscription[] due = {};
+
+     // NOTE(review): this formats local time without a zone suffix, while the
+     // test fixtures store UTC timestamps ending in "Z" — the string
+     // comparison mixes conventions; confirm all writers agree on one format.
+     var now = new DateTime.now_local();
+     var now_str = now.format("%Y-%m-%dT%H:%M:%S");
+
+     var stmt = db.prepare(
+         "SELECT id, url, title, category, enabled, fetch_interval, created_at, updated_at, " +
+         "last_fetched_at, next_fetch_at, error, http_auth_username, http_auth_password " +
+         "FROM feed_subscriptions WHERE enabled = 1 AND " +
+         "(next_fetch_at IS NULL OR next_fetch_at <= ?) " +
+         "ORDER BY next_fetch_at ASC;"
+     );
+
+     stmt.bind_text(1, now_str, -1, null);
+
+     while (stmt.step() == SQLite.SQLITE_ROW) {
+         var subscription = row_to_subscription(stmt);
+         if (subscription != null) {
+             due += subscription;
+         }
+     }
+
+     return due;
+ }
+
+ /**
+  * Convert a database row to a FeedSubscription
+  *
+  * Column indices must mirror the SELECT list shared by the query helpers
+  * above.  Returns null (after logging a warning) when construction fails.
+  */
+ private FeedSubscription? row_to_subscription(SQLite.Stmt stmt) {
+     try {
+         // SELECT order: id, url, title, category, enabled, fetch_interval,
+         // created_at, updated_at, last_fetched_at, next_fetch_at, error,
+         // http_auth_username, http_auth_password — note the constructor takes
+         // fetch_interval (col 5) before category (col 3).
+         return new FeedSubscription.with_values(
+             stmt.column_text(0),
+             stmt.column_text(1),
+             stmt.column_text(2),
+             stmt.column_int(5),
+             stmt.column_text(3),
+             stmt.column_int(4) == 1,
+             stmt.column_text(6),
+             stmt.column_text(7),
+             stmt.column_text(8),
+             stmt.column_text(9),
+             stmt.column_text(10),
+             stmt.column_text(11),
+             stmt.column_text(12)
+         );
+     } catch (Error e) {
+         warning("Failed to parse subscription row: %s", e.message);
+         return null;
+     }
+ }
+
+ /**
+  * Copy a GLib.List of subscriptions into a plain array, skipping null nodes.
+  *
+  * The element type parameter is required: the original declared the bare
+  * generic `GLib.List`, so node.data was an untyped pointer and could not be
+  * appended to a FeedSubscription[].
+  */
+ private FeedSubscription[] list_to_array(GLib.List<FeedSubscription> list) {
+     FeedSubscription[] arr = {};
+     for (unowned GLib.List<FeedSubscription> node = list; node != null; node = node.next) {
+         if (node.data != null) arr += node.data;
+     }
+     return arr;
+ }
+
+}
+
diff --git a/native-route/linux/src/tests/database-tests.vala b/native-route/linux/src/tests/database-tests.vala
new file mode 100644
index 0000000..dd5736a
--- /dev/null
+++ b/native-route/linux/src/tests/database-tests.vala
@@ -0,0 +1,288 @@
+/*
+ * DatabaseTests.vala
+ *
+ * Unit tests for database layer.
+ */
+
+public class RSSuper.DatabaseTests : TestCase {
+ private Database? db;
+ private string test_db_path;
+
+ /**
+  * Creates a throwaway database under /tmp with a per-run unique name.
+  * db stays null (and every test self-skips) when creation fails.
+  */
+ public override void setUp() {
+     base.setUp();
+     // GLib.get_real_time() (microseconds since the epoch) provides the unique
+     // suffix; the original called Time.get_current_time(), which does not
+     // exist in the GLib binding.
+     test_db_path = "/tmp/rssuper_test_%d.db".printf((int) GLib.get_real_time());
+
+     try {
+         db = new Database(test_db_path);
+     } catch (DatabaseError e) {
+         // warning() is the GLib logging call; the original's warn() is undefined.
+         warning("Failed to create test database: %s", e.message);
+     }
+ }
+
+ /**
+  * Closes the database and removes the database file plus SQLite's -wal sidecar.
+  */
+ public override void tearDown() {
+     base.tearDown();
+
+     if (db != null) {
+         db.close();
+         db = null;
+     }
+
+     delete_if_exists(test_db_path);
+     delete_if_exists(test_db_path + "-wal");
+ }
+
+ /**
+  * Best-effort removal of one file.  File.delete() throws GLib.Error (an
+  * IOError, not DatabaseError) — the original caught only DatabaseError,
+  * leaving the actual error domain unhandled in a non-throwing method.
+  */
+ private void delete_if_exists(string path) {
+     var file = File.new_for_path(path);
+     if (file.query_exists()) {
+         try {
+             file.delete();
+         } catch (Error e) {
+             warning("Failed to delete %s: %s", path, e.message);
+         }
+     }
+ }
+
+ /**
+  * Round-trips one subscription through add / get_by_id / update / delete.
+  */
+ public void test_subscription_crud() {
+     if (db == null) return;  // setUp failed; skip rather than crash
+
+     var store = new SubscriptionStore(db);
+
+     // Create test subscription
+     var subscription = new FeedSubscription.with_values(
+         "sub_1",
+         "https://example.com/feed.xml",
+         "Example Feed",
+         60,
+         "Technology",
+         true,
+         "2024-01-01T00:00:00Z",
+         "2024-01-01T00:00:00Z"
+     );
+
+     // Test add
+     store.add(subscription);
+     assert_not_null(store.get_by_id("sub_1"));
+
+     // Test get
+     var retrieved = store.get_by_id("sub_1");
+     assert_not_null(retrieved);
+     assert_equal("Example Feed", retrieved.title);
+     assert_equal("https://example.com/feed.xml", retrieved.url);
+
+     // Test update
+     retrieved.title = "Updated Feed";
+     store.update(retrieved);
+     var updated = store.get_by_id("sub_1");
+     assert_equal("Updated Feed", updated.title);
+
+     // Test delete — SubscriptionStore exposes remove_subscription(), not the
+     // delete() the original called (which does not exist on the store).
+     store.remove_subscription("sub_1");
+     var deleted = store.get_by_id("sub_1");
+     assert_null(deleted);
+ }
+
+ /**
+  * Verifies get_all() returns every row while get_enabled() filters by flag.
+  */
+ public void test_subscription_list() {
+     if (db == null) return;  // setUp failed; skip rather than crash
+
+     var store = new SubscriptionStore(db);
+
+     // Two enabled feeds plus one explicitly disabled one.
+     store.add(new FeedSubscription.with_values("sub_1", "https://feed1.com", "Feed 1"));
+     store.add(new FeedSubscription.with_values("sub_2", "https://feed2.com", "Feed 2"));
+     store.add(new FeedSubscription.with_values("sub_3", "https://feed3.com", "Feed 3", 60, null, false));
+
+     // get_all ignores the enabled flag.
+     var all = store.get_all();
+     assert_equal(3, all.length);
+
+     // get_enabled drops the disabled sub_3.
+     var enabled = store.get_enabled();
+     assert_equal(2, enabled.length);
+ }
+
+ /**
+  * Exercises FeedItemStore add / get_by_id / get_by_subscription /
+  * mark_as_read / delete for a single item under one subscription.
+  * NOTE(review): FeedItemStore is defined elsewhere — confirm delete() and
+  * mark_as_read() are its actual method names.
+  */
+ public void test_feed_item_crud() {
+ if (db == null) return;
+
+ var sub_store = new SubscriptionStore(db);
+ var item_store = new FeedItemStore(db);
+
+ // Create subscription first
+ var subscription = new FeedSubscription.with_values(
+ "sub_1", "https://example.com/feed.xml", "Example Feed"
+ );
+ sub_store.add(subscription);
+
+ // Create test item
+ // NOTE(review): positional arguments follow FeedItem.with_values — the
+ // {"Technology", "News"} literal is presumably the tags/categories array
+ // and the trailing string the feed title; confirm against the constructor.
+ var item = new FeedItem.with_values(
+ "item_1",
+ "Test Article",
+ "https://example.com/article",
+ "This is a test description",
+ "Full content of the article",
+ "John Doe",
+ "2024-01-01T12:00:00Z",
+ "2024-01-01T12:00:00Z",
+ {"Technology", "News"},
+ null, null, null, null,
+ "Example Feed"
+ );
+
+ // Test add
+ item_store.add(item);
+ var retrieved = item_store.get_by_id("item_1");
+ assert_not_null(retrieved);
+ assert_equal("Test Article", retrieved.title);
+
+ // Test get by subscription
+ var items = item_store.get_by_subscription("sub_1");
+ assert_equal(1, items.length);
+
+ // Test mark as read — only checks the call completes; no read-state assertion
+ item_store.mark_as_read("item_1");
+
+ // Test delete
+ item_store.delete("item_1");
+ var deleted = item_store.get_by_id("item_1");
+ assert_null(deleted);
+ }
+
+ /**
+  * Verifies add_batch() inserts all items and get_by_subscription() sees them.
+  */
+ public void test_feed_item_batch() {
+     if (db == null) return;  // setUp failed; skip rather than crash
+
+     var sub_store = new SubscriptionStore(db);
+     var item_store = new FeedItemStore(db);
+
+     // Items need an owning subscription row first.
+     sub_store.add(new FeedSubscription.with_values(
+         "sub_1", "https://example.com/feed.xml", "Example Feed"
+     ));
+
+     // Five items, one per day of January.
+     FeedItem[] batch = {};
+     for (var index = 0; index < 5; index++) {
+         batch += new FeedItem.with_values(
+             "item_%d".printf(index),
+             "Article %d".printf(index),
+             "https://example.com/article%d".printf(index),
+             "Description %d".printf(index),
+             null,
+             "Author %d".printf(index),
+             "2024-01-%02dT12:00:00Z".printf(index + 1),
+             null,
+             null,
+             null, null, null, null,
+             "Example Feed"
+         );
+     }
+
+     // Test batch insert
+     item_store.add_batch(batch);
+
+     var all = item_store.get_by_subscription("sub_1");
+     assert_equal(5, all.length);
+ }
+
+ /**
+  * Verifies SearchHistoryStore records queries and returns them most recent
+  * first via get_history() / get_recent().
+  */
+ public void test_search_history() {
+ if (db == null) return;
+
+ var store = new SearchHistoryStore(db);
+
+ // Create test queries
+ // NOTE(review): SearchQuery is constructed without `new` — presumably a
+ // struct or a creation function; confirm against its declaration.
+ var query1 = SearchQuery("test query", 1, 20, null, SearchSortOption.RELEVANCE);
+ var query2 = SearchQuery("another search", 1, 10, null, SearchSortOption.DATE_DESC);
+
+ // Test record — second argument is presumably the result count; confirm.
+ store.record_search(query1, 15);
+ store.record_search(query2, 8);
+
+ // Test get_history
+ var history = store.get_history();
+ assert_equal(2, history.length);
+ assert_equal("another search", history[0].query); // Most recent first
+
+ // Test get_recent
+ var recent = store.get_recent();
+ assert_equal(2, recent.length);
+ }
+
+ /**
+  * Verifies full-text search: two items with distinct vocabulary are
+  * inserted, then search("swift") and search("python") must each return
+  * exactly the matching item.
+  */
+ public void test_fts_search() {
+ if (db == null) return;
+
+ var sub_store = new SubscriptionStore(db);
+ var item_store = new FeedItemStore(db);
+
+ // Create subscription
+ var subscription = new FeedSubscription.with_values(
+ "sub_1", "https://example.com/feed.xml", "Example Feed"
+ );
+ sub_store.add(subscription);
+
+ // Add items with searchable content
+ // NOTE(review): assumes FeedItemStore.add() also populates the FTS index —
+ // confirm the store keeps the index in sync on insert.
+ var item1 = new FeedItem.with_values(
+ "item_1",
+ "Swift Programming Guide",
+ "https://example.com/swift",
+ "Learn Swift programming language basics",
+ "A comprehensive guide to Swift",
+ "Apple Developer",
+ "2024-01-01T12:00:00Z",
+ null,
+ null,
+ null, null, null, null,
+ "Example Feed"
+ );
+
+ var item2 = new FeedItem.with_values(
+ "item_2",
+ "Python for Data Science",
+ "https://example.com/python",
+ "Data analysis with Python and pandas",
+ "Complete Python data science tutorial",
+ "Data Team",
+ "2024-01-02T12:00:00Z",
+ null,
+ null,
+ null, null, null, null,
+ "Example Feed"
+ );
+
+ item_store.add(item1);
+ item_store.add(item2);
+
+ // Test FTS search — lowercase queries against capitalized titles, so the
+ // search is expected to be case-insensitive
+ var results = item_store.search("swift");
+ assert_equal(1, results.length);
+ assert_equal("Swift Programming Guide", results[0].title);
+
+ results = item_store.search("python");
+ assert_equal(1, results.length);
+ assert_equal("Python for Data Science", results[0].title);
+ }
+
+ /**
+  * Test entry point: registers every database test with the GLib harness.
+  *
+  * NOTE(review): setUp()/tearDown() are never invoked by Test.add_func — the
+  * TestCase base class presumably wires fixtures elsewhere; confirm each test
+  * actually gets a fresh database.
+  */
+ public static int main(string[] args) {
+     Test.init(ref args);
+
+     // Dropped the original's unused `var suite = Test.create_suite()`:
+     // create_suite is not part of the GLib.Test binding, and add_func
+     // registers on the root suite directly.
+     var test_case = new DatabaseTests();
+     Test.add_func("/database/subscription_crud", test_case.test_subscription_crud);
+     Test.add_func("/database/subscription_list", test_case.test_subscription_list);
+     Test.add_func("/database/feed_item_crud", test_case.test_feed_item_crud);
+     Test.add_func("/database/feed_item_batch", test_case.test_feed_item_batch);
+     Test.add_func("/database/search_history", test_case.test_search_history);
+     Test.add_func("/database/fts_search", test_case.test_fts_search);
+
+     return Test.run();
+ }
diff --git a/tasks/native-business-logic-migration/README.md b/tasks/native-business-logic-migration/README.md
index 3da29ef..e842606 100644
--- a/tasks/native-business-logic-migration/README.md
+++ b/tasks/native-business-logic-migration/README.md
@@ -9,13 +9,13 @@ Status legend: [ ] todo, [~] in-progress, [x] done
- [x] 02 — Design shared data models for all platforms → `02-design-shared-data-models.md`
## Phase 2: Data Models (Per Platform)
-- [ ] 03 — Implement iOS data models (Swift) → `03-implement-ios-data-models.md`
-- [ ] 04 — Implement Android data models (Kotlin) → `04-implement-android-data-models.md`
-- [ ] 05 — Implement Linux data models (C/Vala) → `05-implement-linux-data-models.md`
+- [x] 03 — Implement iOS data models (Swift) → `03-implement-ios-data-models.md`
+- [x] 04 — Implement Android data models (Kotlin) → `04-implement-android-data-models.md`
+- [x] 05 — Implement Linux data models (C/Vala) → `05-implement-linux-data-models.md`
## Phase 3: Database Layer (Per Platform)
-- [ ] 06 — Implement iOS database layer (Core Data/GRDB) → `06-implement-ios-database-layer.md`
-- [ ] 07 — Implement Android database layer (Room) → `07-implement-android-database-layer.md`
+- [x] 06 — Implement iOS database layer (Core Data/GRDB) → `06-implement-ios-database-layer.md`
+- [x] 07 — Implement Android database layer (Room) → `07-implement-android-database-layer.md`
- [ ] 08 — Implement Linux database layer (SQLite) → `08-implement-linux-database-layer.md`
## Phase 4: Feed Parsing (Per Platform)