2025-01-20 18:49:54 +03:00
|
|
|
#[cfg(feature = "ssr")]
|
|
|
|
mod db_impl {
|
|
|
|
use rusqlite::{Connection, Error};
|
|
|
|
use serde::{Deserialize, Serialize};
|
2025-01-22 20:16:43 +03:00
|
|
|
use std::sync::Arc;
|
|
|
|
use tokio::sync::Mutex;
|
|
|
|
use leptos::logging;
|
2025-02-25 15:07:28 +03:00
|
|
|
use std::collections::{HashMap, HashSet};
|
2025-02-19 22:45:24 +03:00
|
|
|
use crate::models::item::Item;
|
2025-02-21 15:11:11 +03:00
|
|
|
use leptos::logging::log;
|
2025-01-17 18:51:59 +03:00
|
|
|
|
2025-01-20 18:49:54 +03:00
|
|
|
// Define a struct to represent a database connection
|
|
|
|
/// Handle to the application's single SQLite connection.
///
/// The connection is wrapped in a `tokio::sync::Mutex` so a lock guard can be
/// held across `.await` points, and in an `Arc` so the handle can be shared
/// between async tasks. All access goes through the methods on this type.
#[derive(Debug)]
pub struct Database {
// Sole connection; every method serializes on this lock.
conn: Arc<Mutex<Connection>>,
}
|
|
|
|
|
2025-01-20 18:49:54 +03:00
|
|
|
impl Database {
|
|
|
|
// Create a new database connection
|
|
|
|
pub fn new(db_path: &str) -> Result<Self, Error> {
|
|
|
|
let conn = Connection::open(db_path)?;
|
2025-01-24 01:54:25 +03:00
|
|
|
logging::log!("Database connection established at: {}", db_path);
|
2025-01-22 20:16:43 +03:00
|
|
|
Ok(Database {
|
|
|
|
conn: Arc::new(Mutex::new(conn)),
|
|
|
|
})
|
2025-01-20 18:49:54 +03:00
|
|
|
}
|
2025-01-17 18:51:59 +03:00
|
|
|
|
2025-01-20 18:49:54 +03:00
|
|
|
// Create the database schema
|
2025-01-22 20:16:43 +03:00
|
|
|
/// Create the database schema (idempotent — every statement is
/// `CREATE TABLE IF NOT EXISTS`).
///
/// Tables:
///   1. `properties`      — unique property names plus a usage counter
///   2. `urls`            — tracked URLs
///   3. `items`           — items, each belonging to one URL
///   4. `item_properties` — (item, property) -> value junction table
///
/// Fix: SQLite ships with foreign-key enforcement *disabled* per
/// connection, so the `ON DELETE CASCADE` clauses below were silently
/// inert. `PRAGMA foreign_keys = ON` is now issued first; since the app
/// keeps this one connection alive for its lifetime, the pragma covers all
/// subsequent statements.
///
/// # Errors
/// Propagates any `rusqlite::Error` from schema creation (each failure is
/// also written to stderr with the offending table named).
pub async fn create_schema(&self) -> Result<(), Error> {
    let conn = self.conn.lock().await;

    // Must be enabled explicitly or ON DELETE CASCADE does nothing.
    conn.execute_batch("PRAGMA foreign_keys = ON;")?;

    // 1. Properties table
    conn.execute_batch(
        "CREATE TABLE IF NOT EXISTS properties (
            id INTEGER PRIMARY KEY,
            name TEXT NOT NULL UNIQUE,
            global_usage_count INTEGER DEFAULT 0
        );"
    ).map_err(|e| {
        eprintln!("Failed creating properties table: {}", e);
        e
    })?;

    // 2. URLs table
    conn.execute_batch(
        "CREATE TABLE IF NOT EXISTS urls (
            id INTEGER PRIMARY KEY,
            url TEXT NOT NULL UNIQUE,
            created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
        );",
    ).map_err(|e| {
        eprintln!("Failed creating urls table: {}", e);
        e
    })?;

    // 3. Items table (note: column order is id, url_id, name, description,
    // wikidata_id — readers must select columns explicitly, not `SELECT *`).
    conn.execute_batch(
        "CREATE TABLE IF NOT EXISTS items (
            id TEXT PRIMARY KEY,
            url_id INTEGER NOT NULL,
            name TEXT NOT NULL,
            description TEXT,
            wikidata_id TEXT,
            FOREIGN KEY (url_id) REFERENCES urls(id) ON DELETE CASCADE
        );",
    ).map_err(|e| {
        eprintln!("Failed creating items table: {}", e);
        e
    })?;

    // 4. Junction table for custom properties
    conn.execute_batch(
        "CREATE TABLE IF NOT EXISTS item_properties (
            item_id TEXT NOT NULL,
            property_id INTEGER NOT NULL,
            value TEXT NOT NULL,
            PRIMARY KEY (item_id, property_id),
            FOREIGN KEY (item_id) REFERENCES items(id) ON DELETE CASCADE,
            FOREIGN KEY (property_id) REFERENCES properties(id) ON DELETE CASCADE
        );"
    ).map_err(|e| {
        eprintln!("Failed creating item_properties table: {}", e);
        e
    })?;

    Ok(())
}
|
|
|
|
|
2025-02-12 15:55:17 +03:00
|
|
|
// Insert a new URL into the database
|
|
|
|
pub async fn insert_url(&self, url: &str) -> Result<i64, Error> {
|
|
|
|
let conn = self.conn.lock().await;
|
|
|
|
let mut stmt = conn.prepare("INSERT INTO urls (url) VALUES (?)")?;
|
|
|
|
let url_id = stmt.insert(&[url])?;
|
|
|
|
logging::log!("URL inserted: {}", url);
|
|
|
|
Ok(url_id)
|
|
|
|
}
|
|
|
|
|
2025-01-28 14:36:17 +03:00
|
|
|
pub async fn delete_item(&self, item_id: &str) -> Result<(), Error> {
|
|
|
|
let conn = self.conn.lock().await;
|
|
|
|
conn.execute("DELETE FROM items WHERE id = ?", &[item_id])?;
|
|
|
|
logging::log!("Item deleted: {}", item_id);
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
|
|
|
pub async fn delete_property(&self, property: &str) -> Result<(), Error> {
|
|
|
|
let conn = self.conn.lock().await;
|
|
|
|
let query = format!("UPDATE items SET custom_properties = json_remove(custom_properties, '$.{}')", property);
|
|
|
|
conn.execute(&query, []).map_err(|e| Error::from(e))?;
|
|
|
|
logging::log!("Property deleted: {}", property);
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2025-01-20 18:49:54 +03:00
|
|
|
// Retrieve all items from the database
|
2025-01-22 20:16:43 +03:00
|
|
|
pub async fn get_items(&self) -> Result<Vec<DbItem>, Error> {
|
|
|
|
let conn = self.conn.lock().await;
|
|
|
|
let mut stmt = conn.prepare("SELECT * FROM items;")?;
|
2025-01-20 18:49:54 +03:00
|
|
|
let items = stmt.query_map([], |row| {
|
|
|
|
Ok(DbItem {
|
|
|
|
id: row.get(0)?,
|
|
|
|
name: row.get(1)?,
|
|
|
|
description: row.get(2)?,
|
|
|
|
wikidata_id: row.get(3)?,
|
|
|
|
})
|
|
|
|
})?;
|
|
|
|
let mut result = Vec::new();
|
|
|
|
for item in items {
|
|
|
|
result.push(item?);
|
|
|
|
}
|
2025-01-22 20:16:43 +03:00
|
|
|
logging::log!("Fetched {} items from the database", result.len()); // Log with Leptos
|
2025-01-20 18:49:54 +03:00
|
|
|
Ok(result)
|
2025-01-17 18:51:59 +03:00
|
|
|
}
|
2025-02-12 15:55:17 +03:00
|
|
|
|
|
|
|
// Retrieve all items from the database for a specific URL
|
2025-02-19 22:45:24 +03:00
|
|
|
/// Retrieve all items belonging to `url`, with their custom properties
/// merged in.
///
/// Resolves the URL to its `urls.id`; an unknown URL yields an empty list
/// (not an error). Items are then fetched with a LEFT JOIN against
/// `item_properties`/`properties`, producing one row per (item, property)
/// pair — or a single row with NULL property columns for items that have
/// no properties — and folded into one `Item` each via a `HashMap`.
///
/// NOTE(review): ordering of the returned `Vec` follows `HashMap`
/// iteration order and is therefore unspecified — confirm callers don't
/// rely on a stable order.
pub async fn get_items_by_url(&self, url: &str) -> Result<Vec<Item>, Error> {
    let conn = self.conn.lock().await;
    // Resolve the URL to its row id; distinguish "not found" from real errors.
    let url_id: Option<i64> = match conn.query_row(
        "SELECT id FROM urls WHERE url = ?",
        &[url],
        |row| row.get(0)
    ) {
        Ok(id) => Some(id),
        Err(rusqlite::Error::QueryReturnedNoRows) => None,
        Err(e) => return Err(e),
    };

    let url_id = match url_id {
        Some(id) => id,
        None => return Ok(Vec::new()), // Return empty list if URL not found
    };

    log!("Fetching items for URL '{}' (ID: {})", url, url_id);

    // LEFT JOINs keep items that have no properties (prop_name/value NULL).
    let mut stmt = conn.prepare(
        "SELECT i.id, i.name, i.description, i.wikidata_id,
        p.name AS prop_name, ip.value
        FROM items i
        LEFT JOIN item_properties ip ON i.id = ip.item_id
        LEFT JOIN properties p ON ip.property_id = p.id
        WHERE i.url_id = ?"
    )?;
    // Accumulator keyed by item id; one entry per distinct item.
    let mut items: HashMap<String, Item> = HashMap::new();

    let rows = stmt.query_map([url_id], |row| {
        Ok((
            row.get::<_, String>(0)?, // id
            row.get::<_, String>(1)?, // name
            row.get::<_, String>(2)?, // description
            row.get::<_, Option<String>>(3)?, // wikidata_id
            row.get::<_, Option<String>>(4)?, // prop_name
            row.get::<_, Option<String>>(5)?, // value
        ))
    })?;

    for row in rows {
        let (id, name, desc, wd_id, prop, val) = row?;
        // First row for an item creates it; later rows only add properties.
        let item = items.entry(id.clone()).or_insert(Item {
            id,
            name,
            description: desc,
            wikidata_id: wd_id,
            custom_properties: HashMap::new(),
        });

        // NULL property columns mean "item without properties" — skip.
        if let (Some(p), Some(v)) = (prop, val) {
            item.custom_properties.insert(p, v);
        }
    }

    Ok(items.into_values().collect())
}
|
|
|
|
|
2025-02-25 15:07:28 +03:00
|
|
|
async fn get_or_create_property(
|
|
|
|
&self,
|
|
|
|
tx: &mut rusqlite::Transaction<'_>,
|
|
|
|
prop: &str
|
|
|
|
) -> Result<i64, Error> {
|
|
|
|
match tx.query_row(
|
2025-02-18 23:38:45 +03:00
|
|
|
"SELECT id FROM properties WHERE name = ?",
|
2025-02-25 15:07:28 +03:00
|
|
|
[prop],
|
|
|
|
|row| row.get::<_, i64>(0)
|
|
|
|
) {
|
2025-02-18 23:38:45 +03:00
|
|
|
Ok(id) => Ok(id),
|
2025-02-25 15:07:28 +03:00
|
|
|
Err(rusqlite::Error::QueryReturnedNoRows) => {
|
|
|
|
tx.execute("INSERT INTO properties (name) VALUES (?)", [prop])?;
|
|
|
|
Ok(tx.last_insert_rowid())
|
2025-02-18 23:38:45 +03:00
|
|
|
}
|
2025-02-25 15:07:28 +03:00
|
|
|
Err(e) => Err(e.into()),
|
2025-02-18 23:38:45 +03:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2025-02-12 15:55:17 +03:00
|
|
|
// Insert a new item into the database for a specific URL
|
2025-02-25 01:38:32 +03:00
|
|
|
/// Upsert `item` under `url`, fully synchronizing its custom properties,
/// inside a single transaction.
///
/// Steps (all-or-nothing on commit):
/// 1. acquire the connection lock (timed, for diagnostics);
/// 2. open a transaction;
/// 3. resolve `url` to `urls.id`, inserting the URL if new;
/// 4. upsert the item row (`ON CONFLICT(id) DO UPDATE`);
/// 5. diff `item.custom_properties` against the stored properties:
///    update changed values, insert new ones, delete removed ones.
pub async fn insert_item_by_url(&self, url: &str, item: &Item) -> Result<(), Error> {
    log!("[DB] Starting insert for URL: {}, Item: {}", url, item.id);

    // 1. Check database lock acquisition
    let lock_start = std::time::Instant::now();
    let mut conn = self.conn.lock().await;
    log!("[DB] Lock acquired in {:?}", lock_start.elapsed());

    // 2. Transaction handling
    log!("[DB] Starting transaction");
    let mut tx = conn.transaction().map_err(|e| {
        log!("[DB] Transaction start failed: {:?}", e);
        e
    })?;

    // 3. URL handling — find-or-create the URL row.
    log!("[DB] Checking URL existence: {}", url);
    let url_id = match tx.query_row(
        "SELECT id FROM urls WHERE url = ?",
        [url],
        |row| row.get::<_, i64>(0)
    ) {
        Ok(id) => {
            log!("[DB] Found existing URL ID: {}", id);
            id
        },
        Err(rusqlite::Error::QueryReturnedNoRows) => {
            log!("[DB] Inserting new URL");
            tx.execute("INSERT INTO urls (url) VALUES (?)", [url])?;
            let id = tx.last_insert_rowid();
            log!("[DB] Created URL ID: {}", id);
            id
        }
        Err(e) => return Err(e.into()),
    };

    // 4. Item insertion — upsert keyed on the item id; an existing item is
    // re-pointed at this URL and its scalar fields overwritten.
    log!("[DB] Upserting item");
    tx.execute(
        "INSERT INTO items (id, url_id, name, description, wikidata_id)
        VALUES (?, ?, ?, ?, ?)
        ON CONFLICT(id) DO UPDATE SET
        url_id = excluded.url_id,
        name = excluded.name,
        description = excluded.description,
        wikidata_id = excluded.wikidata_id",
        rusqlite::params![
            &item.id,
            url_id,
            &item.name,
            &item.description,
            &item.wikidata_id
        ],
    )?;
    log!("[DB] Item upserted successfully");

    // Property handling with enhanced logging
    log!("[DB] Synchronizing properties for item {}", item.id);
    // Snapshot the item's currently stored (name -> value) pairs. The inner
    // block scopes `stmt` so its borrow of `tx` ends before we execute below.
    let existing_props = {
        // Prepare statement and collect existing properties
        let mut stmt = tx.prepare(
            "SELECT p.name, ip.value
            FROM item_properties ip
            JOIN properties p ON ip.property_id = p.id
            WHERE ip.item_id = ?"
        )?;

        let mapped_rows = stmt.query_map([&item.id], |row| {
            Ok((row.get::<_, String>(0)?, row.get::<_, String>(1)?))
        })?;

        mapped_rows.collect::<Result<HashMap<String, String>, _>>()?
    };

    for (prop, value) in &item.custom_properties {
        // Update existing or insert new. get_or_create_property also ensures
        // the property name exists in `properties` even on the update path.
        let prop_id = self.get_or_create_property(&mut tx, prop).await?;
        if let Some(existing_value) = existing_props.get(prop) {
            // Only touch the row when the value actually changed.
            if existing_value != value {
                log!("[DB] Updating property {} from '{}' to '{}'", prop, existing_value, value);
                tx.execute(
                    "UPDATE item_properties
                    SET value = ?
                    WHERE item_id = ?
                    AND property_id = (SELECT id FROM properties WHERE name = ?)",
                    rusqlite::params![value, &item.id, prop],
                )?;
            }
        } else {
            log!("[DB] Adding new property {}", prop);
            tx.execute(
                "INSERT INTO item_properties (item_id, property_id, value)
                VALUES (?, ?, ?)",
                rusqlite::params![&item.id, prop_id, value],
            )?;
        }
    }

    // Remove deleted properties — anything stored but no longer present on
    // the incoming item is dropped from the junction table.
    let current_props: HashSet<&str> = item.custom_properties.keys().map(|s| s.as_str()).collect();
    for (existing_prop, _) in existing_props {
        if !current_props.contains(existing_prop.as_str()) {
            log!("[DB] Removing deleted property {}", existing_prop);
            tx.execute(
                "DELETE FROM item_properties
                WHERE item_id = ?
                AND property_id = (SELECT id FROM properties WHERE name = ?)",
                rusqlite::params![&item.id, existing_prop],
            )?;
        }
    }

    tx.commit()?;
    log!("[DB] Transaction committed successfully");
    Ok(())
}
|
|
|
|
|
|
|
|
// Delete an item from the database for a specific URL
|
|
|
|
pub async fn delete_item_by_url(&self, url: &str, item_id: &str) -> Result<(), Error> {
|
2025-02-25 16:47:58 +03:00
|
|
|
let mut conn = self.conn.lock().await;
|
|
|
|
let tx = conn.transaction()?;
|
|
|
|
|
|
|
|
// Get URL ID
|
|
|
|
let url_id: i64 = tx.query_row(
|
|
|
|
"SELECT id FROM urls WHERE url = ?",
|
|
|
|
[url],
|
|
|
|
|row| row.get(0)
|
|
|
|
)?;
|
|
|
|
|
|
|
|
// Delete item and properties
|
|
|
|
tx.execute(
|
|
|
|
"DELETE FROM items WHERE id = ? AND url_id = ?",
|
|
|
|
[item_id, &url_id.to_string()],
|
|
|
|
)?;
|
|
|
|
|
|
|
|
tx.commit()?;
|
2025-02-12 15:55:17 +03:00
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
|
|
|
// Delete a property from the database for a specific URL
|
|
|
|
pub async fn delete_property_by_url(&self, url: &str, property: &str) -> Result<(), Error> {
|
2025-02-25 16:47:58 +03:00
|
|
|
let mut conn = self.conn.lock().await;
|
|
|
|
let tx = conn.transaction()?;
|
2025-02-19 22:45:24 +03:00
|
|
|
|
2025-02-25 16:47:58 +03:00
|
|
|
// Get URL ID
|
|
|
|
let url_id: i64 = tx.query_row(
|
|
|
|
"SELECT id FROM urls WHERE url = ?",
|
|
|
|
[url],
|
|
|
|
|row| row.get(0)
|
|
|
|
)?;
|
|
|
|
|
|
|
|
// Delete property from all items in this URL
|
|
|
|
tx.execute(
|
2025-02-19 22:45:24 +03:00
|
|
|
"DELETE FROM item_properties
|
2025-02-25 16:47:58 +03:00
|
|
|
WHERE property_id IN (
|
|
|
|
SELECT id FROM properties WHERE name = ?
|
|
|
|
)
|
|
|
|
AND item_id IN (
|
|
|
|
SELECT id FROM items WHERE url_id = ?
|
|
|
|
)",
|
|
|
|
[property, &url_id.to_string()],
|
2025-02-19 22:45:24 +03:00
|
|
|
)?;
|
2025-02-25 16:47:58 +03:00
|
|
|
|
|
|
|
tx.commit()?;
|
2025-02-12 15:55:17 +03:00
|
|
|
Ok(())
|
|
|
|
}
|
2025-02-25 16:47:58 +03:00
|
|
|
|
2025-02-21 15:11:11 +03:00
|
|
|
// function to log database state
|
|
|
|
pub async fn debug_dump(&self) -> Result<(), Error> {
|
|
|
|
let conn = self.conn.lock().await;
|
|
|
|
log!("[DATABASE DEBUG] URLs:");
|
|
|
|
let mut stmt = conn.prepare("SELECT id, url FROM urls")?;
|
|
|
|
let urls = stmt.query_map([], |row| {
|
|
|
|
Ok(format!("ID: {}, URL: {}", row.get::<_, i64>(0)?, row.get::<_, String>(1)?))
|
|
|
|
})?;
|
|
|
|
for url in urls {
|
|
|
|
log!("[DATABASE DEBUG] {}", url?);
|
|
|
|
}
|
|
|
|
|
|
|
|
log!("[DATABASE DEBUG] Items:");
|
|
|
|
let mut stmt = conn.prepare("SELECT id, name FROM items")?;
|
|
|
|
let items = stmt.query_map([], |row| {
|
|
|
|
Ok(format!("ID: {}, Name: '{}'", row.get::<_, String>(0)?, row.get::<_, String>(1)?))
|
|
|
|
})?;
|
|
|
|
for item in items {
|
|
|
|
log!("[DATABASE DEBUG] {}", item?);
|
|
|
|
}
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
}
|
2025-01-20 18:49:54 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
// Define a struct to represent an item in the database
|
2025-01-22 14:14:18 +03:00
|
|
|
/// Row shape returned by [`Database::get_items`]: one record from the
/// `items` table, without URL association or custom properties.
#[derive(Debug, Deserialize, Serialize, Clone)]
pub struct DbItem {
// TEXT primary key of the `items` table.
pub id: String,
pub name: String,
// NOTE(review): the `description` column is nullable in the schema but this
// field is a plain `String` — confirm NULL descriptions cannot occur.
pub description: String,
// Optional Wikidata entity identifier.
pub wikidata_id: Option<String>,
}
|
2025-01-17 18:51:59 +03:00
|
|
|
}
|
|
|
|
|
2025-01-20 18:49:54 +03:00
|
|
|
#[cfg(feature = "ssr")]
|
2025-02-19 22:45:24 +03:00
|
|
|
pub use db_impl::{Database, DbItem};
|