Compare commits
118 commits
Commits compared (SHA1):
f35c7cd085, e893e14c26, 303b713d59, 46e9b4e48e, 8c7946091f, 9d21d9999f, 40bb35d6a8, ef7245b716,
5c3070bfc0, a9611a08e4, ebb1afd1af, 734e710d8f, 1f52901885, f0356e9d0c, 69430fae8a, fe98c56872,
12f4043e83, 9a7a8e575c, 3126d90f5a, 8c1cab3615, 85dce655e4, cdca9e7faa, 5465811781, d6d0ab18ec,
d806c0c5dc, 3ef759e5c2, f87f88db2d, 947d244326, db15e33ebd, 32e79ea609, 11e4935055, 7e5f3400ef,
414e91a825, 896de305cc, 47c87159ae, 8ac1d77e06, 6c2442a82b, 04457fef62, db29d1e05a, c96dacaaeb,
aa9743fd2b, 88c6acd7e4, 505647b432, 1b99027dbf, d77a806fe7, 5a14111db7, 7e288b3a82, f51d40a4d0,
a47d6b2e3a, 0a05b41ffa, 197e7be2a8, 2e0b038e2a, b9f3214a38, bca34d1ebc, c9b24faad7, 03ffeb10fc,
b3ac709526, de14061b9a, fd39e3b967, ad9942a44f, 585a4a6eb7, e90a6be010, 63aaa57fa1, a35d4d557d,
2bcdea79dc, a379e93f44, a8d8e9a131, af1e6d949f, 5815c9fe10, 8e3c87f315, b6b1ebde9c, 74bd1a89e5,
9beb997125, 63f11f6a2d, eba20abf5a, ecc991cc24, 8860ace51f, 7939c9e7b6, fddec7f728, 1a5c245250,
e72ed778a2, bfded464c9, ce1e93fc49, 443c7a7e0c, 4bfd47d8c4, 94ed4c46b9, 25b3128181, 23cd674e31,
af921088f9, a40e9c98c4, 2d072f3303, 792b4daf04, 9eb930da19, e0c49ffa86, 1318319ad1, ac8eb8118d,
4ff9928a94, 68b458df5e, afa3bd3ece, c38f19d76c, 49315128f8, 2455619735, 3fa56abc83, e0e5fc49c2,
c1207f613d, fc13b0dae6, 3ed12c80a6, 0ac35c3ca5, e46b693e56, 291cb05847, af3f89c561, 5bd19803fe,
29434dc37c, dc70316bae, 4760364491, c8f32d027f, a99b5164d8, 1f81eae135
18 changed files with 2515 additions and 492 deletions
5 .dockerignore Normal file

@@ -0,0 +1,5 @@
target/
**/*.rs.bk
node_modules/
Dockerfile
docker-compose.yml
6 .gitignore vendored

@@ -15,4 +15,8 @@ playwright/.cache/
# Sass cache dir
.sass-cache/

.idea/
.idea/

# Ignore database file
compareware.db
.qodo
707 Cargo.lock generated
File diff suppressed because it is too large
Cargo.toml

@@ -15,9 +15,11 @@ leptos = { version = "0.6" }
leptos_meta = { version = "0.6" }
leptos_actix = { version = "0.6", optional = true }
leptos_router = { version = "0.6" }
paste = "1.0"
wasm-bindgen = "=0.2.99"
rusqlite = { version = "0.27.0", optional = true}
serde = { version = "1.0", features = ["derive"] }
uuid = { version = "1.0", features = ["v4"] }
uuid = { version = "1.0", features = ["v4", "js"] }
web-sys = { version = "0.3", features = ["Event"] }
nostr-sdk = "0.37"
tokio = "1"

@@ -28,8 +30,12 @@ wasm-bindgen-futures = "0.4"
serde_json = "1.0.133"
thiserror = "2.0.9"
zerofrom = "0.1"
mio = "0.8"
chrono = "0.4"
urlencoding = "2.1.2"

[features]
default = ["ssr"]
csr = ["leptos/csr", "leptos_meta/csr", "leptos_router/csr"]
hydrate = ["leptos/hydrate", "leptos_meta/hydrate", "leptos_router/hydrate"]
ssr = [

@@ -39,6 +45,7 @@ ssr = [
    "leptos/ssr",
    "leptos_meta/ssr",
    "leptos_router/ssr",
    "dep:rusqlite"
]

# Override secp256k1's default features
78 README.md

@@ -1,6 +1,7 @@
# CompareWare
# [CompareWare](https://compareware.org/)

CompareWare is an open-source platform for comparing tools (software, hardware, etc.) with structured, crowdsourced data. It combines **Leptos** for a modern, reactive frontend and **Nostr** for decentralized data storage.
CompareWare is an open-source platform for comparing tools (software, hardware, etc.) with structured, crowdsourced data.
It combines Rust's **Leptos** for a modern, reactive frontend and **Nostr** for decentralized data storage (TBI).

## **Features**
- **Item Management**: Add, view, and manage items with metadata and key-value tags.

@@ -25,7 +26,78 @@ CompareWare is an open-source platform for comparing tools (software, hardware,
```bash
cargo leptos serve
```
3. Open your browser at [http://localhost:3000](http://localhost:3000)
3. Open your browser at [localhost:3000](http://localhost:3000)

## **Database Schema**
### Key Concepts
- **PK (Primary Key)**: Unique identifier for table records (🔑)
- **FK (Foreign Key)**: Reference linking related tables (➡️)
- **Core (core properties)**: name and description.

### Tables Overview

| Table | Columns (PK/FK) | Description | Example Data |
|-------|------------------|-------------|--------------|
| **urls** | `id` (PK), `url`, `created_at` | Stores comparison URLs | `1, "/laptops", 2024-03-01` |
| **items** | `id` (PK), `url_id` (FK), `wikidata_id` | Comparison items | `"item1", 1, "Q214276"` |
| **properties** | `id` (PK), `name` | All available properties (including core) | `1.0, "name"`<br>`2.0, "description"`<br>`3.0, "screen_size"` |
| **item_properties** | `item_id` (PK/FK), `property_id` (PK/FK), `value` | All property values including name/description | `"item1", 1.0, "MacBook Pro"`<br>`"item1", 2.0, "16-inch laptop"`<br>`"item1", 3.0, "16 inches"` |
| **selected_properties** | `url_id` (PK/FK), `property_id` (PK/FK) | Active properties per URL (excludes core) | `1, 3.0` |

### Data Flow
```mermaid
flowchart LR
    User -->|Creates| urls
    User -->|Adds| items
    User -->|Defines| properties
    User -->|Selects| selected_properties
    User -->|Sets Values| item_properties

    urls -->|url_id| items
    urls -->|url_id| selected_properties
    properties -->|property_id| selected_properties
    items -->|item_id| item_properties
    properties -->|property_id| item_properties
```

### Properties data flow
```mermaid
sequenceDiagram
    participant User
    participant App as Application
    participant Wikidata

    User->>App: Enters search
    App->>Wikidata: fetch_wikidata_suggestions()
    Wikidata-->>App: Return suggestions
    App->>User: Show suggestions

    User->>App: Selects item
    App->>Wikidata: fetch_item_properties()
    Wikidata-->>App: Return properties (IDs + values)

    App->>Wikidata: fetch_property_labels()
    Wikidata-->>App: Return labels
    App->>App: Combine labels + properties
    App->>User: Show labeled properties
```
## **Docker Deployment**

### **Prerequisites**
- Docker installed on your system
- Docker Compose (usually included with Docker Desktop)

### **Running with Docker**
1. Clone the repository:
```bash
git clone https://forge.ftt.gmbh/ryanmwangi/Compware.git
cd compareware
```
2. Start the container:
```bash
docker-compose up -d
```
3. Access the application at: [http://localhost:3000](http://localhost:3000)

### **Collaboration**
We welcome contributions! Here’s how you can help:
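The schema above is an EAV-style layout: `item_properties` holds one row per (item, property, value) pair and `properties` maps property ids back to labels. As a minimal illustration, here is a standalone `rusqlite` sketch (not the project's `db.rs`; table and column names follow the README table above, and the data is its MacBook Pro example) showing that listing an item's labelled values is a single join:

```rust
// Standalone sketch (not the project's db.rs): query the EAV layout described
// in the README's schema table with rusqlite. Table and column names follow
// the README table; the rows are its MacBook Pro example data.
use rusqlite::{Connection, Result};

fn main() -> Result<()> {
    let conn = Connection::open_in_memory()?;
    conn.execute_batch(
        "CREATE TABLE properties (id INTEGER PRIMARY KEY, name TEXT NOT NULL UNIQUE);
         CREATE TABLE item_properties (
             item_id     TEXT NOT NULL,
             property_id INTEGER NOT NULL,
             value       TEXT NOT NULL,
             PRIMARY KEY (item_id, property_id)
         );
         INSERT INTO properties (name) VALUES ('name'), ('description'), ('screen_size');
         INSERT INTO item_properties VALUES
             ('item1', 1, 'MacBook Pro'),
             ('item1', 2, '16-inch laptop'),
             ('item1', 3, '16 inches');",
    )?;

    // One join turns property ids back into labels for a single item.
    let mut stmt = conn.prepare(
        "SELECT p.name, ip.value
         FROM item_properties ip
         JOIN properties p ON p.id = ip.property_id
         WHERE ip.item_id = ?1",
    )?;
    let rows = stmt.query_map(["item1"], |row| {
        Ok((row.get::<_, String>(0)?, row.get::<_, String>(1)?))
    })?;
    for row in rows {
        let (property, value) = row?;
        println!("{property}: {value}");
    }
    Ok(())
}
```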
11 docker-compose.yml Normal file

@@ -0,0 +1,11 @@
services:
  app:
    build: .
    ports:
      - "3000:3000"
    volumes:
      - ./compareware.db:/app/compareware.db
    environment:
      - RUST_LOG=info
      - LEPTOS_ENV=production
    restart: unless-stopped
56 dockerfile Normal file

@@ -0,0 +1,56 @@
# Build stage
FROM rust:1.83.0-slim-bullseye as builder

# Install essential build tools
RUN apt-get update && \
    apt-get install -y \
    libsqlite3-dev \
    build-essential \
    clang \
    libssl-dev \
    pkg-config \
    curl \
    cmake \
    protobuf-compiler \
    && rm -rf /var/lib/apt/lists/*

# Install Rust toolchain
RUN rustup component add rust-src

# Install cargo-leptos & wasm-bindgen-cli
RUN cargo install cargo-leptos --version 0.2.24 --locked
RUN cargo install wasm-bindgen-cli --version 0.2.99 --locked

# Build application
WORKDIR /app
COPY . .
# Explicitly set WASM target
RUN rustup target add wasm32-unknown-unknown
# Build project
ENV LEPTOS_OUTPUT_NAME="compareware"

# Build with release profile
RUN cargo leptos build --release

# Runtime stage
FROM debian:bullseye-slim

# Install runtime dependencies in Debian
RUN apt-get update && \
    apt-get install -y \
    libssl-dev \
    libsqlite3-0 \
    ca-certificates \
    && rm -rf /var/lib/apt/lists/*

# Copy build artifacts
COPY --from=builder /app/target/release/compareware /app/
COPY --from=builder /app/target/site /app/site
COPY assets /app/assets

# Configure container, expose port and set entrypoint
WORKDIR /app
EXPOSE 3000
ENV LEPTOS_SITE_ADDR=0.0.0.0:3000
ENV LEPTOS_SITE_ROOT="site"
CMD ["./compareware"]
rust-toolchain.toml

@@ -1,3 +1,3 @@
[toolchain]
channel = "1.83.0"
channel = "1.82.0"
targets = [ "wasm32-unknown-unknown" ]
182 src/api.rs Normal file

@@ -0,0 +1,182 @@
|
|||
#[cfg(feature = "ssr")]
|
||||
use actix_web::{web, HttpResponse};
|
||||
#[cfg(feature = "ssr")]
|
||||
use crate::db::Database;
|
||||
#[cfg(feature = "ssr")]
|
||||
use std::sync::Arc;
|
||||
#[cfg(feature = "ssr")]
|
||||
use tokio::sync::Mutex;
|
||||
#[cfg(feature = "ssr")]
|
||||
use crate::models::item::Item;
|
||||
#[cfg(feature = "ssr")]
|
||||
use std::collections::HashMap;
|
||||
#[cfg(feature = "ssr")]
|
||||
use leptos::logging::log;
|
||||
|
||||
#[cfg(feature = "ssr")]
|
||||
use serde::{Deserialize, Serialize};
|
||||
#[cfg(feature = "ssr")]
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct ItemRequest {
|
||||
pub url: String,
|
||||
pub item: Item,
|
||||
}
|
||||
|
||||
#[cfg(feature = "ssr")]
|
||||
pub async fn get_items(
|
||||
db: web::Data<Arc<Mutex<Database>>>,
|
||||
url: web::Query<String>,
|
||||
) -> HttpResponse {
|
||||
log!("[SERVER] Received request for URL: {}", url);
|
||||
|
||||
let db = db.lock().await;
|
||||
match db.get_items_by_url(&url).await {
|
||||
Ok(items) => {
|
||||
log!("[SERVER] Returning {} items for URL: {}", items.len(), url);
|
||||
HttpResponse::Ok().json(items)
|
||||
},
|
||||
Err(err) => {
|
||||
log!("[SERVER ERROR] Failed to fetch items for {}: {:?}", url, err);
|
||||
HttpResponse::InternalServerError().body("Failed to fetch items")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "ssr")]
|
||||
pub async fn create_item(
|
||||
db: web::Data<Arc<Mutex<Database>>>,
|
||||
request: web::Json<ItemRequest>,
|
||||
) -> HttpResponse {
|
||||
let db = db.lock().await;
|
||||
let url = request.url.clone();
|
||||
let item = request.item.clone();
|
||||
let item_id = request.item.id.clone();
|
||||
// request logging
|
||||
log!("[API] Received item request - URL: {}, Item ID: {}",
|
||||
request.url, request.item.id);
|
||||
|
||||
// raw JSON logging
|
||||
let raw_json = serde_json::to_string(&request.into_inner()).unwrap();
|
||||
log!("[API] Raw request JSON: {}", raw_json);
|
||||
|
||||
match db.insert_item_by_url(&url, &item).await {
|
||||
Ok(_) => {
|
||||
log!("[API] Successfully saved item ID: {}", item_id);
|
||||
HttpResponse::Ok().json(item)
|
||||
},
|
||||
Err(e) => {
|
||||
log!("[API] Database error: {:?}", e);
|
||||
HttpResponse::BadRequest().body(format!("Database error: {}", e))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "ssr")]
|
||||
pub async fn delete_item(
|
||||
db: web::Data<Arc<Mutex<Database>>>,
|
||||
path: web::Path<(String, String)>, // (url, item_id)
|
||||
) -> HttpResponse {
|
||||
let (url, item_id) = path.into_inner();
|
||||
log!("[API] Deleting item {} from URL {}", item_id, url);
|
||||
let db = db.lock().await;
|
||||
match db.delete_item_by_url(&url, &item_id).await {
|
||||
Ok(_) => HttpResponse::Ok().finish(),
|
||||
Err(e) => {
|
||||
log!("[API] Delete error: {:?}", e);
|
||||
HttpResponse::InternalServerError().body(e.to_string())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "ssr")]
|
||||
pub async fn delete_property(
|
||||
db: web::Data<Arc<Mutex<Database>>>,
|
||||
path: web::Path<(String, String)>, // (url, property)
|
||||
) -> HttpResponse {
|
||||
let (url, property) = path.into_inner();
|
||||
log!("[API] Deleting property {} from URL {}", property, url);
|
||||
let db = db.lock().await;
|
||||
match db.delete_property_by_url(&url, &property).await {
|
||||
Ok(_) => HttpResponse::Ok().finish(),
|
||||
Err(e) => {
|
||||
log!("[API] Delete error: {:?}", e);
|
||||
HttpResponse::InternalServerError().body(e.to_string())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "ssr")]
|
||||
pub async fn get_items_by_url(
|
||||
db: web::Data<Arc<Mutex<Database>>>,
|
||||
query: web::Query<HashMap<String, String>>,
|
||||
) -> HttpResponse {
|
||||
let url = query.get("url").unwrap_or(&String::new()).to_string();
|
||||
let db = db.lock().await;
|
||||
match db.get_items_by_url(&url).await {
|
||||
Ok(items) => HttpResponse::Ok().json(items),
|
||||
Err(err) => {
|
||||
leptos::logging::error!("Failed to fetch items by URL: {:?}", err);
|
||||
HttpResponse::InternalServerError().body("Failed to fetch items by URL")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "ssr")]
|
||||
pub async fn delete_item_by_url(
|
||||
db: web::Data<Arc<Mutex<Database>>>,
|
||||
url: web::Path<String>,
|
||||
item_id: web::Path<String>,
|
||||
) -> HttpResponse {
|
||||
let db = db.lock().await;
|
||||
match db.delete_item_by_url(&url, &item_id).await {
|
||||
Ok(_) => HttpResponse::Ok().body("Item deleted"),
|
||||
Err(err) => {
|
||||
leptos::logging::error!("Failed to delete item by URL: {:?}", err);
|
||||
HttpResponse::InternalServerError().body("Failed to delete item by URL")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "ssr")]
|
||||
pub async fn delete_property_by_url(
|
||||
db: web::Data<Arc<Mutex<Database>>>,
|
||||
url: web::Path<String>,
|
||||
property: web::Path<String>,
|
||||
) -> HttpResponse {
|
||||
let db = db.lock().await;
|
||||
match db.delete_property_by_url(&url, &property).await {
|
||||
Ok(_) => HttpResponse::Ok().body("Property deleted"),
|
||||
Err(err) => {
|
||||
leptos::logging::error!("Failed to delete property by URL: {:?}", err);
|
||||
HttpResponse::InternalServerError().body("Failed to delete property by URL")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "ssr")]
|
||||
pub async fn get_selected_properties(
|
||||
db: web::Data<Arc<Mutex<Database>>>,
|
||||
url: web::Path<String>,
|
||||
) -> HttpResponse {
|
||||
let db = db.lock().await;
|
||||
match db.get_selected_properties(&url).await {
|
||||
Ok(properties) => HttpResponse::Ok().json(properties),
|
||||
Err(e) => HttpResponse::InternalServerError().body(e.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "ssr")]
|
||||
pub async fn add_selected_property(
|
||||
db: web::Data<Arc<Mutex<Database>>>,
|
||||
url: web::Path<String>,
|
||||
property: web::Json<String>,
|
||||
) -> HttpResponse {
|
||||
let url = url.into_inner();
|
||||
let property = property.into_inner();
|
||||
|
||||
let db = db.lock().await;
|
||||
match db.add_selected_property(&url, &property).await {
|
||||
Ok(_) => HttpResponse::Ok().finish(),
|
||||
Err(e) => HttpResponse::InternalServerError().body(e.to_string())
|
||||
}
|
||||
}
|
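For reference while reading the handlers above: `create_item` deserializes a `web::Json<ItemRequest>`, i.e. a JSON object with a `url` string and a nested `item`. Below is a hedged sketch of that payload, rebuilt from the `ItemRequest` and `Item` definitions in this diff (serde and serde_json are already dependencies per the Cargo.toml hunks; the concrete values are illustrative, borrowed from the README example data):

```rust
// Hedged sketch of the JSON body create_item accepts, reconstructed from the
// ItemRequest and Item definitions in this diff. The concrete values below
// are invented (borrowed from the README's example data).
use serde::{Deserialize, Serialize};
use std::collections::HashMap;

#[derive(Serialize, Deserialize)]
struct Item {
    id: String,
    name: String,
    description: String,
    wikidata_id: Option<String>,
    custom_properties: HashMap<String, String>,
}

#[derive(Serialize, Deserialize)]
struct ItemRequest {
    url: String,
    item: Item,
}

fn main() {
    let request = ItemRequest {
        url: "/laptops".to_string(),
        item: Item {
            id: "item1".to_string(),
            name: "MacBook Pro".to_string(),
            description: "16-inch laptop".to_string(),
            wikidata_id: Some("Q214276".to_string()),
            custom_properties: HashMap::from([(
                "screen_size".to_string(),
                "16 inches".to_string(),
            )]),
        },
    };
    // This is the body a client would POST to the create-item endpoint.
    println!("{}", serde_json::to_string_pretty(&request).unwrap());
}
```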
69 src/app.rs

@@ -1,11 +1,13 @@
|
|||
use leptos::*;
|
||||
use leptos_meta::*;
|
||||
use crate::components::items_list::ItemsList;
|
||||
use leptos_router::*;
|
||||
use leptos::logging::log;
|
||||
use crate::components::items_list::{ItemsList, load_items_from_db};
|
||||
use crate::models::item::Item;
|
||||
use crate::nostr::NostrClient;
|
||||
use tokio::sync::mpsc;
|
||||
use leptos::spawn_local;
|
||||
use nostr_sdk::serde_json;
|
||||
// use tokio::sync::mpsc;
|
||||
// use crate::nostr::NostrClient;
|
||||
// use nostr_sdk::serde_json;
|
||||
|
||||
#[component]
|
||||
pub fn App() -> impl IntoView {
|
||||
|
@@ -13,26 +15,49 @@ pub fn App() -> impl IntoView {
|
|||
|
||||
// Signal to manage the list of items
|
||||
let (items_signal, set_items) = create_signal(Vec::<Item>::new());
|
||||
let (tx, mut rx) = mpsc::channel::<String>(100);
|
||||
// let (tx, mut rx) = mpsc::channel::<String>(100);
|
||||
|
||||
// Nostr client subscription for items
|
||||
spawn_local(async move {
|
||||
let nostr_client = NostrClient::new("wss://relay.example.com").await.unwrap();
|
||||
nostr_client.subscribe_to_items(tx.clone()).await.unwrap();
|
||||
|
||||
while let Some(content) = rx.recv().await {
|
||||
if let Ok(item) = serde_json::from_str::<Item>(&content) {
|
||||
set_items.update(|items| items.push(item));
|
||||
}
|
||||
}
|
||||
});
|
||||
// // Nostr client subscription for items
|
||||
// spawn_local(async move {
|
||||
// let nostr_client = NostrClient::new("wss://relay.example.com").await.unwrap();
|
||||
// nostr_client.subscribe_to_items(tx.clone()).await.unwrap();
|
||||
|
||||
// while let Some(content) = rx.recv().await {
|
||||
// if let Ok(item) = serde_json::from_str::<Item>(&content) {
|
||||
// set_items.update(|items| items.push(item));
|
||||
// }
|
||||
// }
|
||||
// });
|
||||
view! {
|
||||
<Stylesheet href="/assets/style.css" />
|
||||
<Stylesheet href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.1.1/css/all.min.css" />
|
||||
<div>
|
||||
<h1>{ "CompareWare" }</h1>
|
||||
<ItemsList items=items_signal set_items=set_items />
|
||||
</div>
|
||||
<Router>
|
||||
<Routes>
|
||||
<Route path="/*url" view=move || {
|
||||
let location = use_location();
|
||||
let current_url = move || location.pathname.get();
|
||||
|
||||
// Proper async handling
|
||||
spawn_local({
|
||||
let current_url = current_url.clone();
|
||||
async move {
|
||||
match load_items_from_db(&current_url()).await {
|
||||
Ok(items) => set_items.set(items),
|
||||
Err(e) => log!("Error loading items: {}", e),
|
||||
}
|
||||
}
|
||||
});
|
||||
view! {
|
||||
<Stylesheet href="/assets/style.css" />
|
||||
<Stylesheet href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.1.1/css/all.min.css" />
|
||||
<div>
|
||||
<h1>{ "CompareWare" }</h1>
|
||||
<ItemsList
|
||||
url=current_url()
|
||||
items=items_signal
|
||||
set_items=set_items />
|
||||
</div>
|
||||
}
|
||||
}/>
|
||||
</Routes>
|
||||
</Router>
|
||||
}
|
||||
}
|
||||
|
src/components/item_form.rs (deleted)

@@ -1,74 +0,0 @@
|
|||
use leptos::*;
|
||||
use leptos_dom::ev::SubmitEvent;
|
||||
use leptos::logging::log;
|
||||
|
||||
#[component]
|
||||
pub fn ItemForm(on_submit: Box<dyn Fn(String, String, Vec<(String, String)>, String, u8)>) -> impl IntoView {
|
||||
let (name, set_name) = create_signal(String::new());
|
||||
let (description, set_description) = create_signal(String::new());
|
||||
let (tags, set_tags) = create_signal(Vec::<(String, String)>::new());
|
||||
let (tag_key, set_tag_key) = create_signal(String::new());
|
||||
let (tag_value, set_tag_value) = create_signal(String::new());
|
||||
let (review, set_review) = create_signal(String::new());
|
||||
let (rating, set_rating) = create_signal(5u8); // Default rating to 5
|
||||
|
||||
let add_tag = move |_| {
|
||||
if !tag_key.get().is_empty() && !tag_value.get().is_empty() {
|
||||
set_tags.update(|t| t.push((tag_key.get(), tag_value.get())));
|
||||
set_tag_key.set(String::new());
|
||||
set_tag_value.set(String::new());
|
||||
}
|
||||
};
|
||||
|
||||
let handle_submit = move |ev: SubmitEvent| {
|
||||
ev.prevent_default();
|
||||
|
||||
// Validation
|
||||
if name.get().is_empty() || description.get().is_empty() || rating.get() < 1 || rating.get() > 5 {
|
||||
log!("Validation failed: Check required fields.");
|
||||
return;
|
||||
}
|
||||
|
||||
on_submit(
|
||||
name.get(),
|
||||
description.get(),
|
||||
tags.get().clone(),
|
||||
review.get(),
|
||||
rating.get(),
|
||||
);
|
||||
|
||||
// Reset values
|
||||
set_name.set(String::new());
|
||||
set_description.set(String::new());
|
||||
set_tags.set(vec![]);
|
||||
set_review.set(String::new());
|
||||
set_rating.set(5);
|
||||
};
|
||||
|
||||
view! {
|
||||
<form on:submit=handle_submit>
|
||||
<input type="text" placeholder="Name" on:input=move |e| set_name.set(event_target_value(&e)) />
|
||||
<textarea placeholder="Description" on:input=move |e| set_description.set(event_target_value(&e)) />
|
||||
<h3>{ "Add Tags" }</h3>
|
||||
<input type="text" placeholder="Key" on:input=move |e| set_tag_key.set(event_target_value(&e)) />
|
||||
<input type="text" placeholder="Value" on:input=move |e| set_tag_value.set(event_target_value(&e)) />
|
||||
<button type="button" on:click=add_tag>{ "Add Tag" }</button>
|
||||
<ul>
|
||||
{tags.get().iter().map(|(key, value)| view! {
|
||||
<li>{ format!("{}: {}", key, value) }</li>
|
||||
}).collect::<Vec<_>>() }
|
||||
</ul>
|
||||
<h3>{ "Write a Review" }</h3>
|
||||
<textarea placeholder="Review" on:input=move |e| set_review.set(event_target_value(&e)) />
|
||||
<h3>{ "Rating (1-5)" }</h3>
|
||||
<input
|
||||
type="number"
|
||||
min="1"
|
||||
max="5"
|
||||
value={rating.get()}
|
||||
on:input=move |e| set_rating.set(event_target_value(&e).parse::<u8>().unwrap_or(5))
|
||||
/>
|
||||
<button type="submit">{ "Add Item" }</button>
|
||||
</form>
|
||||
}
|
||||
}
|
File diff suppressed because it is too large

src/components/mod.rs

@@ -1,3 +1,2 @@
pub mod item_form;
pub mod items_list;
pub mod editable_cell;
|
Deleted file (Wikidata lookup component)

@@ -1,48 +0,0 @@
|
|||
use leptos::*;
|
||||
use serde::Deserialize;
|
||||
|
||||
#[derive(Deserialize, Clone, Debug)]
|
||||
struct WikidataResult {
|
||||
id: String,
|
||||
label: String,
|
||||
description: Option<String>,
|
||||
}
|
||||
|
||||
#[component]
|
||||
pub fn WikidataLookup(
|
||||
query: String,
|
||||
on_select: impl Fn(WikidataResult) + 'static,
|
||||
) -> impl IntoView {
|
||||
let (suggestions, set_suggestions) = create_signal(Vec::new());
|
||||
|
||||
let fetch_suggestions = move |query: String| {
|
||||
spawn_local(async move {
|
||||
if query.is_empty() {
|
||||
set_suggestions(Vec::new());
|
||||
return;
|
||||
}
|
||||
let url = format!("https://www.wikidata.org/w/api.php?action=wbsearchentities&search={}&language=en&limit=5&format=json&origin=*", query);
|
||||
if let Ok(response) = reqwest::get(&url).await {
|
||||
if let Ok(data) = response.json::<WikidataResponse>().await {
|
||||
set_suggestions(data.search);
|
||||
}
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
create_effect(move || {
|
||||
fetch_suggestions(query.clone());
|
||||
});
|
||||
|
||||
view! {
|
||||
<ul>
|
||||
{suggestions.get().iter().map(|suggestion| {
|
||||
view! {
|
||||
<li on:click=move |_| on_select(suggestion.clone())>
|
||||
{format!("{} - {}", suggestion.label, suggestion.description.clone().unwrap_or_default())}
|
||||
</li>
|
||||
}
|
||||
}).collect::<Vec<_>>()}
|
||||
</ul>
|
||||
}
|
||||
}
|
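The removed component above queried Wikidata's `wbsearchentities` endpoint directly from the browser. For context, the same lookup can be sketched as a standalone Rust program; this is illustrative only and assumes `reqwest` (with its `json` feature) and `tokio`, which are not declared in this diff's Cargo.toml hunks, while the URL shape and the `id`/`label`/`description` fields mirror the deleted code:

```rust
// Standalone sketch of the wbsearchentities request the removed component
// made from the browser. reqwest (with its "json" feature) and tokio are
// assumed here purely for illustration; they are not part of this diff.
use serde::Deserialize;

#[derive(Deserialize, Debug)]
struct SearchEntity {
    id: String,
    label: String,
    description: Option<String>,
}

#[derive(Deserialize, Debug)]
struct SearchResponse {
    search: Vec<SearchEntity>,
}

#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    // Same URL shape as in the deleted code above.
    let url = format!(
        "https://www.wikidata.org/w/api.php?action=wbsearchentities&search={}&language=en&limit=5&format=json&origin=*",
        "laptop"
    );
    let response: SearchResponse = reqwest::get(&url).await?.json().await?;
    for entity in response.search {
        println!(
            "{} - {} ({})",
            entity.id,
            entity.label,
            entity.description.unwrap_or_default()
        );
    }
    Ok(())
}
```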
784 src/db.rs Normal file

@@ -0,0 +1,784 @@
|
|||
#[cfg(feature = "ssr")]
|
||||
mod db_impl {
|
||||
use crate::models::item::Item;
|
||||
use leptos::logging;
|
||||
use leptos::logging::log;
|
||||
use rusqlite::{Connection, Error};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::{HashMap, HashSet};
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::Mutex;
|
||||
use uuid::Uuid;
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use tokio::runtime::Runtime;
|
||||
use uuid::Uuid;
|
||||
|
||||
// Helper function to create test database
|
||||
async fn create_test_db() -> Database {
|
||||
log!("[TEST] Creating in-memory test database");
|
||||
let db = Database::new(":memory:").unwrap();
|
||||
db.create_schema().await.unwrap();
|
||||
log!("[TEST] Database schema created");
|
||||
db
|
||||
}
|
||||
|
||||
// Test database schema creation
|
||||
#[tokio::test]
|
||||
async fn test_schema_creation() {
|
||||
log!("[TEST] Starting test_schema_creation");
|
||||
let db = create_test_db().await;
|
||||
|
||||
// Verify tables exist
|
||||
let conn = db.conn.lock().await;
|
||||
let mut stmt = conn
|
||||
.prepare("SELECT name FROM sqlite_master WHERE type='table'")
|
||||
.unwrap();
|
||||
let tables: Vec<String> = stmt
|
||||
.query_map([], |row| row.get(0))
|
||||
.unwrap()
|
||||
.collect::<Result<_, _>>()
|
||||
.unwrap();
|
||||
|
||||
assert!(tables.contains(&"urls".to_string()));
|
||||
assert!(tables.contains(&"items".to_string()));
|
||||
assert!(tables.contains(&"properties".to_string()));
|
||||
assert!(tables.contains(&"item_properties".to_string()));
|
||||
assert!(tables.contains(&"selected_properties".to_string()));
|
||||
}
|
||||
|
||||
// Item Lifecycle Tests
|
||||
#[tokio::test]
|
||||
async fn test_full_item_lifecycle() {
|
||||
log!("[TEST] Starting test_full_item_lifecycle");
|
||||
let db = create_test_db().await;
|
||||
let test_url = "https://example.com";
|
||||
let test_item = Item {
|
||||
id: Uuid::new_v4().to_string(),
|
||||
name: "Test Item".into(),
|
||||
description: "Test Description".into(),
|
||||
wikidata_id: Some("Q123".into()),
|
||||
custom_properties: vec![
|
||||
("price".into(), "100".into()),
|
||||
("color".into(), "red".into()),
|
||||
]
|
||||
.into_iter()
|
||||
.collect(),
|
||||
};
|
||||
|
||||
// Test insertion
|
||||
log!("[TEST] Testing item insertion");
|
||||
db.insert_item_by_url(test_url, &test_item).await.unwrap();
|
||||
log!("[TEST] Item insertion - PASSED");
|
||||
|
||||
// Test retrieval
|
||||
log!("[TEST] Testing item retrieval");
|
||||
let items = db.get_items_by_url(test_url).await.unwrap();
|
||||
assert_eq!(items.len(), 1);
|
||||
let stored_item = &items[0];
|
||||
assert_eq!(stored_item.name, test_item.name);
|
||||
assert_eq!(stored_item.custom_properties.len(), 2);
|
||||
log!("[TEST] Item retrieval and validation - PASSED");
|
||||
|
||||
// Test update
|
||||
log!("[TEST] Testing item update");
|
||||
let mut updated_item = test_item.clone();
|
||||
updated_item.name = "Updated Name".into();
|
||||
db.insert_item_by_url(test_url, &updated_item)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Verify update
|
||||
let items = db.get_items_by_url(test_url).await.unwrap();
|
||||
assert_eq!(items[0].name, "Updated Name");
|
||||
log!("[TEST] Item update - PASSED");
|
||||
|
||||
// Test deletion
|
||||
log!("[TEST] Testing item deletion");
|
||||
db.delete_item_by_url(test_url, &test_item.id)
|
||||
.await
|
||||
.unwrap();
|
||||
let items = db.get_items_by_url(test_url).await.unwrap();
|
||||
assert!(items.is_empty());
|
||||
log!("[TEST] Item deletion - PASSED");
|
||||
log!("[TEST] test_full_item_lifecycle completed successfully");
|
||||
}
|
||||
|
||||
//URL Management Tests
|
||||
#[tokio::test]
|
||||
async fn test_url_management() {
|
||||
log!("[TEST] Starting test_url_management");
|
||||
let db = create_test_db().await;
|
||||
let test_url = "https://test.com";
|
||||
|
||||
// Test URL creation
|
||||
log!("[TEST] Testing URL creation");
|
||||
let url_id = db.insert_url(test_url).await.unwrap();
|
||||
assert!(url_id > 0);
|
||||
log!("[TEST] URL creation - PASSED");
|
||||
|
||||
// Test duplicate URL handling
|
||||
log!("[TEST] Testing duplicate URL handling");
|
||||
let duplicate_id = db.insert_url(test_url).await.unwrap();
|
||||
assert_eq!(url_id, duplicate_id);
|
||||
log!("[TEST] Duplicate URL handling - PASSED");
|
||||
|
||||
// Test URL retrieval
|
||||
log!("[TEST] Testing URL retrieval");
|
||||
let conn = db.conn.lock().await;
|
||||
let stored_url: String = conn
|
||||
.query_row("SELECT url FROM urls WHERE id = ?", [url_id], |row| {
|
||||
row.get(0)
|
||||
})
|
||||
.unwrap();
|
||||
assert_eq!(stored_url, test_url);
|
||||
log!("[TEST] URL retrieval - PASSED");
|
||||
|
||||
log!("[TEST] test_url_management completed successfully");
|
||||
}
|
||||
|
||||
//property management tests
|
||||
#[tokio::test]
|
||||
async fn test_property_operations() {
|
||||
log!("[TEST] Starting test_property_operations");
|
||||
let db = create_test_db().await;
|
||||
let test_url = "https://props.com";
|
||||
let test_item = Item {
|
||||
id: Uuid::new_v4().to_string(),
|
||||
name: "Test Item".into(),
|
||||
description: "Test Description".into(),
|
||||
wikidata_id: Some("Q123".into()),
|
||||
custom_properties: vec![
|
||||
("price".into(), "100".into()),
|
||||
("color".into(), "red".into()),
|
||||
]
|
||||
.into_iter()
|
||||
.collect(),
|
||||
};
|
||||
// Test property creation
|
||||
log!("[TEST] Testing property creation");
|
||||
db.insert_item_by_url(test_url, &test_item).await.unwrap();
|
||||
|
||||
// Verify properties stored
|
||||
let items = db.get_items_by_url(test_url).await.unwrap();
|
||||
assert_eq!(items[0].custom_properties.len(), 2);
|
||||
log!("[TEST] Property creation - PASSED");
|
||||
|
||||
// Test property deletion
|
||||
log!("[TEST] Testing property deletion");
|
||||
db.delete_property_by_url(test_url, "price").await.unwrap();
|
||||
let items = db.get_items_by_url(test_url).await.unwrap();
|
||||
assert_eq!(items[0].custom_properties.len(), 1);
|
||||
assert!(!items[0].custom_properties.contains_key("price"));
|
||||
log!("[TEST] Property deletion - PASSED");
|
||||
|
||||
log!("[TEST] test_property_operations completed successfully");
|
||||
}
|
||||
|
||||
//selected properties test
|
||||
#[tokio::test]
|
||||
async fn test_selected_properties() {
|
||||
log!("[TEST] Starting test_selected_properties");
|
||||
let db = create_test_db().await;
|
||||
let test_url = "https://selected.com";
|
||||
|
||||
// Add test properties
|
||||
log!("[TEST] Adding selected properties");
|
||||
db.add_selected_property(test_url, "price").await.unwrap();
|
||||
db.add_selected_property(test_url, "weight").await.unwrap();
|
||||
|
||||
// Test retrieval
|
||||
log!("[TEST] Testing property retrieval");
|
||||
let props = db.get_selected_properties(test_url).await.unwrap();
|
||||
assert_eq!(props.len(), 2);
|
||||
assert!(props.contains(&"price".to_string()));
|
||||
assert!(props.contains(&"weight".to_string()));
|
||||
log!("[TEST] Property retrieval - PASSED");
|
||||
|
||||
// Test duplicate prevention
|
||||
log!("[TEST] Testing duplicate prevention");
|
||||
db.add_selected_property(test_url, "price").await.unwrap();
|
||||
let props = db.get_selected_properties(test_url).await.unwrap();
|
||||
assert_eq!(props.len(), 2); // No duplicate added
|
||||
log!("[TEST] Duplicate prevention - PASSED");
|
||||
|
||||
log!("[TEST] test_selected_properties completed successfully");
|
||||
}
|
||||
}
|
||||
|
||||
// Define a struct to represent a database connection
|
||||
#[derive(Debug)]
|
||||
pub struct Database {
|
||||
conn: Arc<Mutex<Connection>>,
|
||||
}
|
||||
|
||||
impl Database {
|
||||
// Create a new database connection
|
||||
pub fn new(db_path: &str) -> Result<Self, Error> {
|
||||
let conn = Connection::open(db_path)?;
|
||||
logging::log!("Database connection established at: {}", db_path);
|
||||
Ok(Database {
|
||||
conn: Arc::new(Mutex::new(conn)),
|
||||
})
|
||||
}
|
||||
|
||||
// Create the database schema
|
||||
pub async fn create_schema(&self) -> Result<(), Error> {
|
||||
let conn = self.conn.lock().await;
|
||||
|
||||
// 1. Properties table
|
||||
conn.execute_batch(
|
||||
"CREATE TABLE IF NOT EXISTS properties (
|
||||
id INTEGER PRIMARY KEY,
|
||||
name TEXT NOT NULL UNIQUE,
|
||||
global_usage_count INTEGER DEFAULT 0
|
||||
);",
|
||||
)
|
||||
.map_err(|e| {
|
||||
eprintln!("Failed creating properties table: {}", e);
|
||||
e
|
||||
})?;
|
||||
|
||||
// 2. URLs table
|
||||
conn.execute_batch(
|
||||
"CREATE TABLE IF NOT EXISTS urls (
|
||||
id INTEGER PRIMARY KEY,
|
||||
url TEXT NOT NULL UNIQUE,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);",
|
||||
)
|
||||
.map_err(|e| {
|
||||
eprintln!("Failed creating urls table: {}", e);
|
||||
e
|
||||
})?;
|
||||
|
||||
// 3. Items table
|
||||
conn.execute_batch(
|
||||
"CREATE TABLE IF NOT EXISTS items (
|
||||
id TEXT PRIMARY KEY,
|
||||
url_id INTEGER NOT NULL,
|
||||
wikidata_id TEXT,
|
||||
item_order INTEGER NOT NULL DEFAULT 0,
|
||||
FOREIGN KEY (url_id) REFERENCES urls(id) ON DELETE CASCADE
|
||||
);
|
||||
INSERT OR IGNORE INTO properties (name) VALUES
|
||||
('name'),
|
||||
('description');",
|
||||
)
|
||||
.map_err(|e| {
|
||||
eprintln!("Failed creating items table: {}", e);
|
||||
e
|
||||
})?;
|
||||
|
||||
// Check if the global_item_id column exists
|
||||
let mut stmt = conn.prepare("PRAGMA table_info(items);")?;
|
||||
let columns: Vec<String> = stmt
|
||||
.query_map([], |row| row.get(1))? // Column 1 contains the column names
|
||||
.collect::<Result<_, _>>()?;
|
||||
|
||||
if !columns.contains(&"global_item_id".to_string()) {
|
||||
conn.execute_batch(
|
||||
"ALTER TABLE items ADD COLUMN global_item_id TEXT;"
|
||||
)
|
||||
.map_err(|e| {
|
||||
eprintln!("Failed adding global_item_id to items table: {}", e);
|
||||
e
|
||||
})?;
|
||||
}
|
||||
|
||||
// 4. Table for selected properties
|
||||
conn.execute_batch(
|
||||
"CREATE TABLE IF NOT EXISTS selected_properties (
|
||||
url_id INTEGER NOT NULL,
|
||||
property_id INTEGER NOT NULL,
|
||||
PRIMARY KEY (url_id, property_id),
|
||||
FOREIGN KEY (url_id) REFERENCES urls(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (property_id) REFERENCES properties(id) ON DELETE CASCADE
|
||||
);",
|
||||
)
|
||||
.map_err(|e| {
|
||||
eprintln!("Failed creating properties table: {}", e);
|
||||
e
|
||||
})?;
|
||||
|
||||
// 5. Junction table for custom properties
|
||||
conn.execute_batch(
|
||||
"CREATE TABLE IF NOT EXISTS item_properties (
|
||||
global_item_id TEXT NOT NULL,
|
||||
property_id INTEGER NOT NULL,
|
||||
value TEXT NOT NULL,
|
||||
PRIMARY KEY (global_item_id, property_id),
|
||||
FOREIGN KEY (global_item_id) REFERENCES items(global_item_id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (property_id) REFERENCES properties(id) ON DELETE CASCADE
|
||||
);",
|
||||
)
|
||||
.map_err(|e| {
|
||||
eprintln!("Failed creating item_properties table: {}", e);
|
||||
e
|
||||
})?;
|
||||
|
||||
// 6. Junction table for deleted properties
|
||||
conn.execute_batch(
|
||||
"CREATE TABLE IF NOT EXISTS deleted_properties (
|
||||
url_id INTEGER NOT NULL,
|
||||
global_item_id TEXT NOT NULL,
|
||||
property_id INTEGER NOT NULL,
|
||||
PRIMARY KEY (url_id, global_item_id, property_id),
|
||||
FOREIGN KEY (url_id) REFERENCES urls(id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (global_item_id) REFERENCES items(global_item_id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (property_id) REFERENCES properties(id) ON DELETE CASCADE
|
||||
);",
|
||||
).map_err(|e| {
|
||||
eprintln!("Failed creating item_properties table: {}", e);
|
||||
e
|
||||
})?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Insert a new URL into the database
|
||||
pub async fn insert_url(&self, url: &str) -> Result<i64, Error> {
|
||||
let mut conn = self.conn.lock().await;
|
||||
let tx = conn.transaction()?;
|
||||
|
||||
// Use INSERT OR IGNORE to handle duplicates
|
||||
tx.execute("INSERT OR IGNORE INTO urls (url) VALUES (?)", [url])?;
|
||||
|
||||
// Get the URL ID whether it was inserted or already existed
|
||||
let url_id =
|
||||
tx.query_row("SELECT id FROM urls WHERE url = ?", [url], |row| row.get(0))?;
|
||||
|
||||
tx.commit()?;
|
||||
logging::log!("URL inserted: {}", url);
|
||||
Ok(url_id)
|
||||
}
|
||||
|
||||
pub async fn delete_item(&self, item_id: &str) -> Result<(), Error> {
|
||||
let conn = self.conn.lock().await;
|
||||
conn.execute("DELETE FROM items WHERE id = ?", &[item_id])?;
|
||||
logging::log!("Item deleted: {}", item_id);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn delete_property(&self, property: &str) -> Result<(), Error> {
|
||||
let conn = self.conn.lock().await;
|
||||
let query = format!(
|
||||
"UPDATE items SET custom_properties = json_remove(custom_properties, '$.{}')",
|
||||
property
|
||||
);
|
||||
conn.execute(&query, []).map_err(|e| Error::from(e))?;
|
||||
logging::log!("Property deleted: {}", property);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Retrieve all items from the database
|
||||
pub async fn get_items(&self) -> Result<Vec<DbItem>, Error> {
|
||||
let conn = self.conn.lock().await;
|
||||
let mut stmt = conn.prepare("SELECT * FROM items;")?;
|
||||
let items = stmt.query_map([], |row| {
|
||||
Ok(DbItem {
|
||||
id: row.get(0)?,
|
||||
name: row.get(1)?,
|
||||
description: row.get(2)?,
|
||||
wikidata_id: row.get(3)?,
|
||||
})
|
||||
})?;
|
||||
let mut result = Vec::new();
|
||||
for item in items {
|
||||
result.push(item?);
|
||||
}
|
||||
logging::log!("Fetched {} items from the database", result.len()); // Log with Leptos
|
||||
Ok(result)
|
||||
}
|
||||
|
||||
// Retrieve all items from the database for a specific URL
|
||||
pub async fn get_items_by_url(&self, url: &str) -> Result<Vec<Item>, Error> {
|
||||
let conn = self.conn.lock().await;
|
||||
let url_id: Option<i64> =
|
||||
match conn.query_row("SELECT id FROM urls WHERE url = ?", &[url], |row| {
|
||||
row.get(0)
|
||||
}) {
|
||||
Ok(id) => Some(id),
|
||||
Err(rusqlite::Error::QueryReturnedNoRows) => None,
|
||||
Err(e) => return Err(e),
|
||||
};
|
||||
|
||||
let url_id = match url_id {
|
||||
Some(id) => id,
|
||||
None => return Ok(Vec::new()), // Return empty list if URL not found
|
||||
};
|
||||
|
||||
log!("Fetching items for URL '{}' (ID: {})", url, url_id);
|
||||
|
||||
let mut stmt = conn.prepare(
|
||||
"WITH ordered_items AS (
|
||||
SELECT
|
||||
i.id,
|
||||
i.wikidata_id,
|
||||
i.item_order,
|
||||
i.global_item_id
|
||||
FROM items i
|
||||
WHERE i.url_id = ?
|
||||
ORDER BY i.item_order ASC
|
||||
)
|
||||
SELECT
|
||||
oi.id,
|
||||
oi.wikidata_id,
|
||||
name_ip.value AS name,
|
||||
desc_ip.value AS description,
|
||||
json_group_object(p.name, ip.value) as custom_properties
|
||||
FROM ordered_items oi
|
||||
LEFT JOIN item_properties ip
|
||||
ON oi.global_item_id = ip.global_item_id
|
||||
AND ip.property_id NOT IN (
|
||||
SELECT property_id
|
||||
FROM deleted_properties
|
||||
WHERE url_id = ? AND global_item_id = oi.global_item_id
|
||||
)
|
||||
LEFT JOIN properties p
|
||||
ON ip.property_id = p.id
|
||||
LEFT JOIN item_properties name_ip
|
||||
ON oi.global_item_id = name_ip.global_item_id
|
||||
AND name_ip.property_id = (SELECT id FROM properties WHERE name = 'name')
|
||||
LEFT JOIN item_properties desc_ip
|
||||
ON oi.global_item_id = desc_ip.global_item_id
|
||||
AND desc_ip.property_id = (SELECT id FROM properties WHERE name = 'description')
|
||||
GROUP BY oi.id
|
||||
ORDER BY oi.item_order ASC"
|
||||
)?;
|
||||
|
||||
// Change from HashMap to Vec to preserve order
|
||||
let rows = stmt.query_map([url_id, url_id], |row| {
|
||||
let custom_props_json: String = row.get(4)?;
|
||||
let custom_properties: HashMap<String, String> = serde_json::from_str(&custom_props_json)
|
||||
.unwrap_or_default();
|
||||
|
||||
Ok(Item {
|
||||
id: row.get(0)?,
|
||||
name: row.get::<_, Option<String>>(2)?.unwrap_or_default(), // Handle NULL values for name
|
||||
description: row.get::<_, Option<String>>(3)?.unwrap_or_default(), // Handle NULL values for description
|
||||
wikidata_id: row.get(1)?,
|
||||
custom_properties,
|
||||
})
|
||||
})?;
|
||||
|
||||
let mut items = Vec::new();
|
||||
for row in rows {
|
||||
items.push(row?);
|
||||
}
|
||||
|
||||
Ok(items)
|
||||
}
|
||||
|
||||
async fn get_or_create_property(
|
||||
&self,
|
||||
tx: &mut rusqlite::Transaction<'_>,
|
||||
prop: &str,
|
||||
) -> Result<i64, Error> {
|
||||
match tx.query_row("SELECT id FROM properties WHERE name = ?", [prop], |row| {
|
||||
row.get::<_, i64>(0)
|
||||
}) {
|
||||
Ok(id) => Ok(id),
|
||||
Err(rusqlite::Error::QueryReturnedNoRows) => {
|
||||
tx.execute("INSERT INTO properties (name) VALUES (?)", [prop])?;
|
||||
Ok(tx.last_insert_rowid())
|
||||
}
|
||||
Err(e) => Err(e.into()),
|
||||
}
|
||||
}
|
||||
|
||||
// Insert a new item into the database for a specific URL
|
||||
pub async fn insert_item_by_url(&self, url: &str, item: &Item) -> Result<(), Error> {
|
||||
log!("[DB] Starting insert for URL: {}, Item: {}", url, item.id);
|
||||
|
||||
// 1. Check database lock acquisition
|
||||
let lock_start = std::time::Instant::now();
|
||||
let mut conn = self.conn.lock().await;
|
||||
log!("[DB] Lock acquired in {:?}", lock_start.elapsed());
|
||||
|
||||
// 2. Transaction handling
|
||||
log!("[DB] Starting transaction");
|
||||
let mut tx = conn.transaction().map_err(|e| {
|
||||
log!("[DB] Transaction start failed: {:?}", e);
|
||||
e
|
||||
})?;
|
||||
|
||||
// 3. URL handling
|
||||
log!("[DB] Checking URL existence: {}", url);
|
||||
let url_id = match tx.query_row("SELECT id FROM urls WHERE url = ?", [url], |row| {
|
||||
row.get::<_, i64>(0)
|
||||
}) {
|
||||
Ok(id) => {
|
||||
log!("[DB] Found existing URL ID: {}", id);
|
||||
id
|
||||
}
|
||||
Err(rusqlite::Error::QueryReturnedNoRows) => {
|
||||
log!("[DB] Inserting new URL");
|
||||
tx.execute("INSERT INTO urls (url) VALUES (?)", [url])?;
|
||||
let id = tx.last_insert_rowid();
|
||||
log!("[DB] Created URL ID: {}", id);
|
||||
id
|
||||
}
|
||||
Err(e) => return Err(e.into()),
|
||||
};
|
||||
|
||||
// 4. Item insertion
|
||||
let max_order: i32 = tx.query_row(
|
||||
"SELECT COALESCE(MAX(item_order), 0) FROM items WHERE url_id = ?",
|
||||
[url_id],
|
||||
|row| row.get(0),
|
||||
)?;
|
||||
|
||||
let global_item_id = match tx.query_row(
|
||||
"SELECT ip.global_item_id
|
||||
FROM item_properties ip
|
||||
JOIN properties p ON ip.property_id = p.id
|
||||
WHERE p.name = 'name' AND ip.value = ? LIMIT 1",
|
||||
[&item.name],
|
||||
|row| row.get::<_, String>(0),
|
||||
) {
|
||||
Ok(id) => id, // Reuse existing global_item_id
|
||||
Err(rusqlite::Error::QueryReturnedNoRows) => {
|
||||
let new_id = Uuid::new_v4().to_string(); // Generate a new global_item_id
|
||||
new_id
|
||||
}
|
||||
Err(e) => return Err(e.into()),
|
||||
};
|
||||
|
||||
log!("[DB] Upserting item");
|
||||
tx.execute(
|
||||
"INSERT INTO items (id, url_id, wikidata_id, item_order, global_item_id)
|
||||
VALUES (?, ?, ?, ?, ?)
|
||||
ON CONFLICT(id) DO UPDATE SET
|
||||
url_id = excluded.url_id,
|
||||
wikidata_id = excluded.wikidata_id,
|
||||
global_item_id = excluded.global_item_id",
|
||||
rusqlite::params![
|
||||
&item.id,
|
||||
url_id,
|
||||
&item.wikidata_id,
|
||||
max_order + 1,
|
||||
&global_item_id
|
||||
],
|
||||
)?;
|
||||
log!("[DB] Item upserted successfully");
|
||||
|
||||
// property handling
|
||||
let core_properties = vec![
|
||||
("name", &item.name),
|
||||
("description", &item.description)
|
||||
];
|
||||
|
||||
for (prop, value) in core_properties.into_iter().chain(
|
||||
item.custom_properties.iter().map(|(k, v)| (k.as_str(), v))
|
||||
) {
|
||||
let prop_id = self.get_or_create_property(&mut tx, prop).await?;
|
||||
|
||||
tx.execute(
|
||||
"INSERT INTO item_properties (global_item_id, property_id, value)
|
||||
VALUES (?, ?, ?)
|
||||
ON CONFLICT(global_item_id, property_id) DO UPDATE SET
|
||||
value = excluded.value",
|
||||
rusqlite::params![&global_item_id, prop_id, value],
|
||||
)?;
|
||||
}
|
||||
|
||||
// Property synchronization
|
||||
log!("[DB] Synchronizing properties for item {}", item.id);
|
||||
let existing_props = {
|
||||
let mut stmt = tx.prepare(
|
||||
"SELECT p.name, ip.value
|
||||
FROM item_properties ip
|
||||
JOIN properties p ON ip.property_id = p.id
|
||||
WHERE ip.global_item_id = ?",
|
||||
)?;
|
||||
|
||||
let mapped_rows = stmt.query_map([&item.id], |row| {
|
||||
Ok((row.get::<_, String>(0)?, row.get::<_, String>(1)?))
|
||||
})?;
|
||||
|
||||
mapped_rows.collect::<Result<HashMap<String, String>, _>>()?
|
||||
};
|
||||
|
||||
// Include core properties in current_props check
|
||||
let mut current_props: HashSet<&str> = item.custom_properties.keys()
|
||||
.map(|s| s.as_str())
|
||||
.collect();
|
||||
current_props.insert("name");
|
||||
current_props.insert("description");
|
||||
|
||||
// Cleanup with core property protection
|
||||
for (existing_prop, _) in existing_props {
|
||||
if !current_props.contains(existing_prop.as_str())
|
||||
&& !["name", "description"].contains(&existing_prop.as_str())
|
||||
{
|
||||
log!("[DB] Removing deleted property {}", existing_prop);
|
||||
tx.execute(
|
||||
"DELETE FROM item_properties
|
||||
WHERE item_id = ?
|
||||
AND property_id = (SELECT id FROM properties WHERE name = ?)",
|
||||
rusqlite::params![&item.id, existing_prop],
|
||||
)?;
|
||||
}
|
||||
}
|
||||
tx.commit()?;
|
||||
log!("[DB] Transaction committed successfully");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Delete an item from the database for a specific URL
|
||||
pub async fn delete_item_by_url(&self, url: &str, item_id: &str) -> Result<(), Error> {
|
||||
let mut conn = self.conn.lock().await;
|
||||
let tx = conn.transaction()?;
|
||||
|
||||
// Get URL ID
|
||||
let url_id: i64 =
|
||||
tx.query_row("SELECT id FROM urls WHERE url = ?", [url], |row| row.get(0))?;
|
||||
|
||||
// Delete item and properties
|
||||
tx.execute(
|
||||
"DELETE FROM items WHERE id = ? AND url_id = ?",
|
||||
[item_id, &url_id.to_string()],
|
||||
)?;
|
||||
|
||||
tx.execute(
|
||||
"DELETE FROM item_properties WHERE global_item_id = ?",
|
||||
[item_id],
|
||||
)?;
|
||||
|
||||
tx.commit()?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Delete a property from the database for a specific URL
|
||||
pub async fn delete_property_by_url(&self, url: &str, property: &str) -> Result<(), Error> {
|
||||
let mut conn = self.conn.lock().await;
|
||||
let tx = conn.transaction()?;
|
||||
|
||||
// Get URL ID
|
||||
let url_id: i64 =
|
||||
tx.query_row("SELECT id FROM urls WHERE url = ?", [url], |row| row.get(0))?;
|
||||
|
||||
// Get property ID
|
||||
let property_id: i64 = tx.query_row(
|
||||
"SELECT id FROM properties WHERE name = ?",
|
||||
[property],
|
||||
|row| row.get(0),
|
||||
)?;
|
||||
|
||||
// Get all global_item_ids for this URL
|
||||
{
|
||||
let mut stmt = tx.prepare("SELECT global_item_id FROM items WHERE url_id = ?")?;
|
||||
let global_item_ids: Vec<String> = stmt
|
||||
.query_map([url_id], |row| row.get(0))?
|
||||
.collect::<Result<_, _>>()?;
|
||||
|
||||
// Insert into deleted_properties for each global_item_id
|
||||
for global_item_id in global_item_ids {
|
||||
tx.execute(
|
||||
"INSERT OR IGNORE INTO deleted_properties (url_id, global_item_id, property_id)
|
||||
VALUES (?, ?, ?)",
|
||||
rusqlite::params![url_id, global_item_id, property_id],
|
||||
)?;
|
||||
}
|
||||
}
|
||||
|
||||
tx.commit()?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn add_selected_property(&self, url: &str, property: &str) -> Result<(), Error> {
|
||||
let mut conn = self.conn.lock().await;
|
||||
let tx = conn.transaction()?;
|
||||
|
||||
// Insert the URL if it does not exist
|
||||
tx.execute("INSERT OR IGNORE INTO urls (url) VALUES (?)", [url])?;
|
||||
|
||||
// Get URL ID
|
||||
let url_id = tx.query_row("SELECT id FROM urls WHERE url = ?", [url], |row| {
|
||||
row.get::<_, i64>(0)
|
||||
})?;
|
||||
|
||||
// Get/Create property
|
||||
let prop_id = match tx.query_row(
|
||||
"SELECT id FROM properties WHERE name = ?",
|
||||
[property],
|
||||
|row| row.get::<_, i64>(0),
|
||||
) {
|
||||
Ok(id) => id,
|
||||
Err(_) => {
|
||||
tx.execute("INSERT INTO properties (name) VALUES (?)", [property])?;
|
||||
tx.last_insert_rowid()
|
||||
}
|
||||
};
|
||||
|
||||
// Insert into selected_properties
|
||||
tx.execute(
|
||||
"INSERT OR IGNORE INTO selected_properties (url_id, property_id) VALUES (?, ?)",
|
||||
[url_id, prop_id],
|
||||
)?;
|
||||
|
||||
tx.commit()?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn get_selected_properties(&self, url: &str) -> Result<Vec<String>, Error> {
|
||||
let conn = self.conn.lock().await;
|
||||
let mut stmt = conn.prepare(
|
||||
"SELECT p.name
|
||||
FROM selected_properties sp
|
||||
JOIN properties p ON sp.property_id = p.id
|
||||
JOIN urls u ON sp.url_id = u.id
|
||||
WHERE u.url = ?",
|
||||
)?;
|
||||
|
||||
let properties = stmt.query_map([url], |row| row.get(0))?;
|
||||
properties.collect()
|
||||
}
|
||||
|
||||
// function to log database state
|
||||
pub async fn debug_dump(&self) -> Result<(), Error> {
|
||||
let conn = self.conn.lock().await;
|
||||
log!("[DATABASE DEBUG] URLs:");
|
||||
let mut stmt = conn.prepare("SELECT id, url FROM urls")?;
|
||||
let urls = stmt.query_map([], |row| {
|
||||
Ok(format!(
|
||||
"ID: {}, URL: {}",
|
||||
row.get::<_, i64>(0)?,
|
||||
row.get::<_, String>(1)?
|
||||
))
|
||||
})?;
|
||||
for url in urls {
|
||||
log!("[DATABASE DEBUG] {}", url?);
|
||||
}
|
||||
|
||||
log!("[DATABASE DEBUG] Items:");
|
||||
let mut stmt = conn.prepare("SELECT id, name FROM items")?;
|
||||
let items = stmt.query_map([], |row| {
|
||||
Ok(format!(
|
||||
"ID: {}, Name: '{}'",
|
||||
row.get::<_, String>(0)?,
|
||||
row.get::<_, String>(1)?
|
||||
))
|
||||
})?;
|
||||
for item in items {
|
||||
log!("[DATABASE DEBUG] {}", item?);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
// Define a struct to represent an item in the database
|
||||
#[derive(Debug, Deserialize, Serialize, Clone)]
|
||||
pub struct DbItem {
|
||||
pub id: String,
|
||||
pub name: String,
|
||||
pub description: String,
|
||||
pub wikidata_id: Option<String>,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "ssr")]
|
||||
pub use db_impl::{Database, DbItem};
src/lib.rs

@@ -2,6 +2,9 @@ pub mod app;
pub mod components;
pub mod models;
pub mod nostr;
pub mod api;
#[cfg(feature = "ssr")]
pub mod db;


#[cfg(feature = "hydrate")]
117 src/main.rs

@@ -1,4 +1,11 @@
|
|||
#[cfg(feature = "ssr")]
|
||||
use actix_web::{web, HttpResponse, Responder};
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::Mutex;
|
||||
use compareware::db::Database;
|
||||
use compareware::api::{ItemRequest,create_item, get_items, get_selected_properties, add_selected_property};
|
||||
use compareware::models::item::Item;
|
||||
|
||||
#[actix_web::main]
|
||||
async fn main() -> std::io::Result<()> {
|
||||
use actix_files::Files;
|
||||
|
@@ -6,33 +13,131 @@ async fn main() -> std::io::Result<()> {
|
|||
use leptos::*;
|
||||
use leptos_actix::{generate_route_list, LeptosRoutes};
|
||||
use compareware::app::*;
|
||||
|
||||
use compareware::db::Database;
|
||||
use compareware::api::{delete_item, delete_property}; // Import API handlers
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::Mutex;
|
||||
|
||||
// Initialize the database
|
||||
let db = Database::new("compareware.db").unwrap();
|
||||
db.create_schema().await.unwrap(); // Ensure the schema is created
|
||||
let db = Arc::new(Mutex::new(db)); // Wrap the database in an Arc<Mutex<T>> for shared state
|
||||
println!("Schema created successfully!");
|
||||
|
||||
// Load configuration
|
||||
let conf = get_configuration(None).await.unwrap();
|
||||
let addr = conf.leptos_options.site_addr;
|
||||
|
||||
|
||||
// Generate the list of routes in your Leptos App
|
||||
let routes = generate_route_list(App);
|
||||
println!("listening on http://{}", &addr);
|
||||
|
||||
// Start the Actix Web server
|
||||
HttpServer::new(move || {
|
||||
let leptos_options = &conf.leptos_options;
|
||||
let site_root = &leptos_options.site_root;
|
||||
let db = db.clone(); // Clone the Arc for each worker
|
||||
|
||||
|
||||
App::new()
|
||||
// serve JS/WASM/CSS from `pkg`
|
||||
.app_data(web::Data::new(db.clone()))
|
||||
// Register custom API routes BEFORE Leptos server functions
|
||||
.service(
|
||||
web::scope("/api")
|
||||
.service(
|
||||
web::scope("/urls/{url}")
|
||||
.route("/items", web::get().to(get_items_handler)) // GET items by URL
|
||||
.route("/items", web::post().to(create_item_handler)) // Create item for URL
|
||||
.route("/items/{item_id}", web::delete().to(delete_item)) // Delete item for URL
|
||||
.route("/properties", web::get().to(get_selected_properties_handler))
|
||||
.route("/properties", web::post().to(add_selected_property_handler))
|
||||
.route("/properties/{property}", web::delete().to(delete_property)) // Delete property for URL
|
||||
)
|
||||
)
|
||||
// Register server functions
|
||||
.route("/api/{tail:.*}", leptos_actix::handle_server_fns())
|
||||
// Serve JS/WASM/CSS from `pkg`
|
||||
.service(Files::new("/pkg", format!("{site_root}/pkg")))
|
||||
// serve other assets from the `assets` directory
|
||||
// Serve other assets from the `assets` directory
|
||||
.service(Files::new("/assets", site_root))
|
||||
// serve the favicon from /favicon.ico
|
||||
// Serve the favicon from /favicon.ico
|
||||
.service(favicon)
|
||||
// Register Leptos routes
|
||||
.leptos_routes(leptos_options.to_owned(), routes.to_owned(), App)
|
||||
// Pass Leptos options to the app
|
||||
.app_data(web::Data::new(leptos_options.to_owned()))
|
||||
//.wrap(middleware::Compress::default())
|
||||
//.wrap(middleware::Compress::default())
|
||||
// Pass the database as shared state
|
||||
.app_data(web::Data::new(db))
|
||||
// Register URL routing
|
||||
.service(web::resource("/").route(web::get().to(index)))
|
||||
.service(web::resource("/{url}").route(web::get().to(url_handler)))
|
||||
})
|
||||
.bind(&addr)?
|
||||
.run()
|
||||
.await
|
||||
}
|
||||
|
||||
// Handler to get items for a specific URL
|
||||
async fn get_items_handler(
|
||||
db: web::Data<Arc<Mutex<Database>>>,
|
||||
url: web::Path<String>,
|
||||
) -> impl Responder {
|
||||
get_items(db, web::Query(url.into_inner())).await
|
||||
}
|
||||
|
||||
// Handler to create an item for a specific URL
|
||||
async fn create_item_handler(
|
||||
db: web::Data<Arc<Mutex<Database>>>,
|
||||
url: web::Path<String>,
|
||||
item: web::Json<Item>,
|
||||
) -> impl Responder {
|
||||
let request = ItemRequest {
|
||||
url: url.into_inner(),
|
||||
item: item.into_inner()
|
||||
};
|
||||
create_item(db, web::Json(request)).await
|
||||
}
|
||||
|
||||
// // Handler to delete an item for a specific URL
|
||||
// async fn delete_item_handler(
|
||||
// db: web::Data<Arc<Mutex<Database>>>,
|
||||
// path: web::Path<(String, String)>,
|
||||
// ) -> impl Responder {
|
||||
// let (url, item_id) = path.into_inner();
|
||||
// delete_item_by_url(db, web::Path::from(url), web::Path::from(item_id)).await
|
||||
// }
|
||||
|
||||
#[cfg(feature = "ssr")]
|
||||
async fn get_selected_properties_handler(
|
||||
db: web::Data<Arc<Mutex<Database>>>,
|
||||
url: web::Path<String>,
|
||||
) -> impl Responder {
|
||||
get_selected_properties(db, url).await
|
||||
}
|
||||
|
||||
#[cfg(feature = "ssr")]
|
||||
async fn add_selected_property_handler(
|
||||
db: web::Data<Arc<Mutex<Database>>>,
|
||||
url: web::Path<String>,
|
||||
property: web::Json<String>,
|
||||
) -> impl Responder {
|
||||
add_selected_property(db, url, property).await
|
||||
}
|
||||
#[cfg(feature = "ssr")]
|
||||
// Define the index handler
|
||||
async fn index() -> HttpResponse {
|
||||
HttpResponse::Ok().body("Welcome to CompareWare!")
|
||||
}
|
||||
#[cfg(feature = "ssr")]
|
||||
// Define the URL handler
|
||||
async fn url_handler(url: web::Path<String>) -> HttpResponse {
|
||||
let url = url.into_inner();
|
||||
// TODO: Implement URL-based content storage and editing functionality
|
||||
HttpResponse::Ok().body(format!("You are viewing the content at {}", url))
|
||||
}
|
||||
|
||||
#[cfg(feature = "ssr")]
|
||||
#[actix_web::get("favicon.ico")]
|
||||
async fn favicon(
|
||||
|
@@ -63,4 +168,4 @@ pub fn main() {
|
|||
console_error_panic_hook::set_once();
|
||||
|
||||
leptos::mount_to_body(App);
|
||||
}
|
||||
}
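Putting the routing above together: the Actix scope nests item and property endpoints under `/api/urls/{url}/...`, and `create_item_handler` wraps a plain `Item` JSON body into an `ItemRequest` before calling the API layer. A hedged client sketch of exercising those routes is shown below; it assumes `reqwest` with its `json` feature (not present in this diff's Cargo.toml hunks), while `tokio` and `serde_json` are listed there:

```rust
// Illustrative client for the routes registered in main.rs above.
// Assumption: reqwest (with its "json" feature) is available; it is NOT in
// this diff's Cargo.toml hunks. The item fields follow src/models/item.rs.
#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    let base = "http://localhost:3000/api/urls/laptops";
    let client = reqwest::Client::new();

    // POST /api/urls/{url}/items: the handler wraps this plain Item JSON
    // into an ItemRequest before calling create_item.
    let item = serde_json::json!({
        "id": "item1",
        "name": "MacBook Pro",
        "description": "16-inch laptop",
        "wikidata_id": "Q214276",
        "custom_properties": { "screen_size": "16 inches" }
    });
    client.post(format!("{base}/items")).json(&item).send().await?;

    // GET /api/urls/{url}/items: list the items stored for this comparison URL.
    let items: serde_json::Value = client
        .get(format!("{base}/items"))
        .send()
        .await?
        .json()
        .await?;
    println!("{items:#}");

    // DELETE /api/urls/{url}/items/{item_id}: remove the item again.
    client.delete(format!("{base}/items/item1")).send().await?;
    Ok(())
}
```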
src/models/item.rs

@@ -7,13 +7,6 @@ pub struct Item {
    pub id: String,
    pub name: String,
    pub description: String,
    pub reviews: Vec<ReviewWithRating>,
    pub wikidata_id: Option<String>,
    pub custom_properties: HashMap<String, String>,
}

#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct ReviewWithRating {
    pub content: String,
    pub rating: u8, // Ratings from 1 to 5
}