Compare commits: af921088f9 ... 4bfd47d8c4

4 commits

Author | SHA1 | Date
---|---|---
 | 4bfd47d8c4 |
 | 94ed4c46b9 |
 | 25b3128181 |
 | 23cd674e31 |

3 changed files with 160 additions and 146 deletions

Cargo.lock (generated): 7 changed lines
@@ -785,6 +785,7 @@ dependencies = [
  "serde_json",
  "thiserror 2.0.9",
  "tokio",
+ "urlencoding",
  "uuid",
  "wasm-bindgen",
  "wasm-bindgen-futures",
@@ -3400,6 +3401,12 @@ dependencies = [
  "serde",
 ]
 
+[[package]]
+name = "urlencoding"
+version = "2.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da"
+
 [[package]]
 name = "utf-8"
 version = "0.7.6"

@@ -32,6 +32,7 @@ thiserror = "2.0.9"
 zerofrom = "0.1"
 mio = "0.8"
 chrono = "0.4"
+urlencoding = "2.1.2"
 
 [features]
 default = ["ssr"]
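The urlencoding crate added above is what the component code later in this comparison uses to percent-encode a SPARQL query before appending it to the Wikidata query endpoint URL. A minimal, self-contained sketch of that usage (not part of the diff; the helper name sparql_url and the entity ID "Q42" are placeholders for illustration):

// Sketch only: mirrors how the component builds its SPARQL request URL.
// sparql_url and "Q42" are illustrative, not identifiers taken from the diff.
fn sparql_url(entity_id: &str) -> String {
    let sparql_query = format!(
        r#"
        SELECT ?propLabel ?value ?valueLabel WHERE {{
          wd:{} ?prop ?statement.
          ?statement ?ps ?value.
          ?property wikibase:claim ?prop.
          ?property wikibase:statementProperty ?ps.
          SERVICE wikibase:label {{ bd:serviceParam wikibase:language "en". }}
        }}
        "#,
        entity_id
    );
    // urlencoding::encode percent-encodes the query so it can travel as a GET parameter.
    format!(
        "https://query.wikidata.org/sparql?query={}&format=json",
        urlencoding::encode(&sparql_query)
    )
}

fn main() {
    println!("{}", sparql_url("Q42"));
}
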
@@ -8,7 +8,9 @@ use crate::models::item::Item;
 use std::collections::HashMap;
 use std::sync::Arc;
 use wasm_bindgen::JsCast;
-use chrono::{DateTime, Utc};
+use urlencoding::encode;
+use gloo_net::http::Request;
+use serde_json::Value;
 
 #[derive(Deserialize, Clone, Debug)]
 struct WikidataSuggestion {
@@ -41,11 +43,11 @@ pub fn ItemsList(
     // State to manage dynamic property names
     let (custom_properties, set_custom_properties) = create_signal(Vec::<String>::new());
 
-    // state to manage suggestions visibility
+    // State to manage suggestions visibility
     let (show_suggestions, set_show_suggestions) = create_signal(HashMap::<String, bool>::new());
 
     // cache to store fetched properties
-    let (fetched_properties, set_fetched_properties) = create_signal(HashMap::<String, String>::new());
+    let (fetched_properties, set_fetched_properties) = create_signal(HashMap::<String, HashMap<String, String>>::new());
 
     // Signal to store the fetched property labels
     let (property_labels, set_property_labels) = create_signal(HashMap::<String, String>::new());
@@ -99,7 +101,7 @@ pub fn ItemsList(
                 }
             });
 
-            log!("Items after loading: {:?}", items.get());
+            // log!("Items after loading: {:?}", items.get());
         }
         Err(err) => {
             log!("Error loading items: {}", err);
@@ -161,7 +163,7 @@ pub fn ItemsList(
             match response {
                 Ok(resp) => {
                     if resp.status() == 200 {
-                        log!("Item saved to database: {:?}", item_to_send);
+                        // log!("Item saved to database: {:?}", item_to_send);
                     } else {
                         log!("Failed to save item: {}", resp.status_text());
                     }
@@ -185,7 +187,7 @@ pub fn ItemsList(
                     .await
                     .map_err(|err| format!("Failed to parse items: {:?}", err))?;
 
-                log!("Deserialized DB items: {:?}", db_items);
+                // log!("Deserialized DB items: {:?}", db_items);
 
                 // Convert DbItem to Item
                 let items = db_items
@@ -208,14 +210,14 @@ pub fn ItemsList(
                         }
                     })
                     .collect();
-                log!("Converted items: {:?}", items);
+                // log!("Converted items: {:?}", items);
                 Ok(items)
             } else {
                 Err(format!("Failed to fetch items: {}", response.status_text()))
             }
         }
 
-    // remove an item
+    // Function to remove an item
     let remove_item = move |index: usize| {
         let item_id = items.get()[index].id.clone();
         spawn_local(async move {
@@ -238,6 +240,7 @@ pub fn ItemsList(
         });
     };
 
+    // Function to remove a property
     let remove_property = move |property: String| {
         spawn_local(async move {
             let response = gloo_net::http::Request::delete(&format!("/api/properties/{}", property))
@@ -267,10 +270,11 @@ pub fn ItemsList(
         });
     };
 
+    // State to store Wikidata suggestions
     let (wikidata_suggestions, set_wikidata_suggestions) = create_signal(HashMap::<String, Vec<WikidataSuggestion>>::new());
 
-    // Fetch Wikidata suggestions
-    let fetch_wikidata_suggestions = move |key:String, query: String| {
+    // Function to fetch Wikidata suggestions
+    let fetch_wikidata_suggestions = move |key: String, query: String| {
         log!("Fetching suggestions for key: {}, query: {}", key, query);
         spawn_local(async move {
             if query.is_empty() {
@@ -288,9 +292,7 @@ pub fn ItemsList(
             match gloo_net::http::Request::get(&url).send().await {
                 Ok(response) => {
                     if let Ok(data) = response.json::<WikidataResponse>().await {
-                        log!("Fetching suggestions for key: {}, query: {}", key, query);
                         set_wikidata_suggestions.update(|suggestions| {
-                            log!("Updated suggestions: {:?}", suggestions);
                             suggestions.insert(key, data.search);
                         });
                     }
@@ -300,157 +302,155 @@ pub fn ItemsList(
         });
     };
 
-    // function to handle different nested JSON types for property values
-    async fn parse_property_value(value: &serde_json::Value) -> String {
-        match value {
-            serde_json::Value::String(text) => text.clone(),
-            serde_json::Value::Number(num) => num.to_string(),
-            serde_json::Value::Object(map) => {
-
-                // Handle time values
-                if let Some(time_value) = map.get("time") {
-                    let precision = map.get("precision").and_then(|p| p.as_u64()).unwrap_or(11);
-
-                    if let Some(time_str) = time_value.as_str() {
-                        if let Ok(parsed_date) = chrono::DateTime::parse_from_rfc3339(time_str.trim_start_matches('+')) {
-                            return match precision {
-                                9 => parsed_date.format("%Y").to_string(), // Year precision
-                                10 => parsed_date.format("%Y-%m").to_string(), // Month precision
-                                11 => parsed_date.format("%Y-%m-%d").to_string(), // Day precision
-                                _ => parsed_date.format("%Y-%m-%d %H:%M:%S").to_string(),
-                            };
-                        }
-                    }
-                    return "Invalid time format".to_string();
-                }
-
-                // Handle Wikidata entity references
-                if let Some(id) = map.get("id") {
-                    // Handle Wikidata entity references
-                    let entity_id = id.as_str().unwrap_or("");
-                    if entity_id.starts_with("Q") {
-                        return fetch_entity_label(entity_id).await;
-                    }
-                }
-                serde_json::to_string(map).unwrap_or("Complex Object".to_string())
-            }
-            _ => "Unsupported data type".to_string(),
-        }
-    }
-
-    async fn fetch_entity_label(entity_id: &str) -> String {
-        let url = format!(
-            "https://www.wikidata.org/w/api.php?action=wbgetentities&ids={}&props=labels&languages=en&format=json&origin=*",
-            entity_id
-        );
-
-        match gloo_net::http::Request::get(&url).send().await {
-            Ok(response) => {
-                if let Ok(data) = response.json::<serde_json::Value>().await {
-                    if let Some(entity) = data["entities"][entity_id]["labels"]["en"]["value"].as_str() {
-                        return entity.to_string();
-                    }
-                }
-            }
-            Err(err) => log!("Error fetching entity label: {:?}", err),
-        }
-
-        entity_id.to_string() // Fallback to entity ID if label fetch fails
-    }
-
     //function to fetch properties
-    async fn fetch_item_properties(wikidata_id: &str, set_fetched_properties: WriteSignal<HashMap<String, String>>, set_property_labels: WriteSignal<HashMap<String, String>>,) -> HashMap<String, String> {
-        let url = format!(
-            "https://www.wikidata.org/w/api.php?action=wbgetentities&ids={}&format=json&props=claims&origin=*",
+    async fn fetch_item_properties(wikidata_id: &str) -> HashMap<String, String> {
+        let sparql_query = format!(
+            r#"
+            SELECT ?propLabel ?value ?valueLabel WHERE {{
+              wd:{} ?prop ?statement.
+              ?statement ?ps ?value.
+              ?property wikibase:claim ?prop.
+              ?property wikibase:statementProperty ?ps.
+              SERVICE wikibase:label {{ bd:serviceParam wikibase:language "en". }}
+            }}
+            "#,
             wikidata_id
         );
 
-        match gloo_net::http::Request::get(&url).send().await {
+        let url = format!(
+            "https://query.wikidata.org/sparql?query={}&format=json",
+            urlencoding::encode(&sparql_query)
+        );
+
+        match gloo_net::http::Request::get(&url)
+            .header("Accept", "application/json")
+            .send()
+            .await
+        {
             Ok(response) => {
                 if let Ok(data) = response.json::<serde_json::Value>().await {
-                    if let Some(entities) = data["entities"].as_object() {
-                        if let Some(entity) = entities.get(wikidata_id) {
-                            if let Some(claims) = entity["claims"].as_object() {
                     let mut result = HashMap::new();
-
-                                for (property, values) in claims {
-                                    for value_entry in values.as_array().unwrap_or(&vec![]) {
-                                        if let Some(datavalue) = value_entry["mainsnak"]["datavalue"].get("value") {
-                                            let parsed_value = parse_property_value(datavalue).await;
-                                            result.insert(property.clone(), parsed_value);
-                                        }
-                                    }
-                                }
-
-                                // Fetch labels for the properties
-                                let property_ids = result.keys().cloned().collect::<Vec<_>>();
-                                let labels = fetch_property_labels(property_ids).await;
-                                set_property_labels.update(|labels_map| {
-                                    for (key, value) in labels {
-                                        labels_map.insert(key, value);
-                                    }
-                                });
-
-                                // Update fetched properties
-                                set_fetched_properties.update(|properties| {
-                                    for (key, val) in result.clone() {
-                                        properties.insert(key.clone(), val.clone());
-                                    }
-                                });
-                                return result;
-                            }
-                        }
-                    }
-                }
-            }
-            Err(err) => log!("Error fetching item properties: {:?}", err),
-        }
-
+                    if let Some(bindings) = data["results"]["bindings"].as_array() {
+                        for binding in bindings {
+                            let prop_label = binding["propLabel"]["value"].as_str().unwrap_or("").to_string();
+                            let prop_label = prop_label.replace("http://www.wikidata.org/prop/", "");
+                            let value_label = binding["valueLabel"]["value"].as_str().unwrap_or("").to_string();
+                            result.insert(prop_label, value_label);
+                            log!("result: {:?}", result);
+                        }
+                    }
+                    result
+                } else {
                     HashMap::new()
                 }
+            }
+            Err(_) => HashMap::new(),
+        }
+    }
 
     async fn fetch_property_labels(property_ids: Vec<String>) -> HashMap<String, String> {
-        let mut property_labels = HashMap::new();
+        log!("Fetching property labels for properties: {:?}", property_ids);
 
-        // Construct the API URL to fetch labels for multiple properties
-        let url = format!(
-            "https://www.wikidata.org/w/api.php?action=wbgetentities&ids={}&props=labels&format=json&languages=en&origin=*",
-            property_ids.join("|")
+        // Remove the "http://www.wikidata.org/prop/" prefix from property IDs
+        let property_ids: Vec<String> = property_ids
+            .into_iter()
+            .map(|id| id.replace("http://www.wikidata.org/prop/", ""))
+            .collect();
+
+        let property_ids_str = property_ids.join(" wd:");
+        let sparql_query = format!(
+            r#"
+            SELECT ?prop ?propLabel WHERE {{
+              VALUES ?prop {{ wd:{} }}
+              SERVICE wikibase:label {{ bd:serviceParam wikibase:language "en". }}
+            }}
+            "#,
+            property_ids_str
         );
 
-        match gloo_net::http::Request::get(&url).send().await {
+        let url = format!(
+            "https://query.wikidata.org/sparql?query={}&format=json",
+            urlencoding::encode(&sparql_query)
+        );
+        log!("Sending request to URL: {}", url);
+
+        match gloo_net::http::Request::get(&url)
+            .header("Accept", "application/json")
+            .send()
+            .await
+        {
             Ok(response) => {
-                if let Ok(data) = response.json::<serde_json::Value>().await {
-                    if let Some(entities) = data["entities"].as_object() {
-                        for (property_id, entity) in entities {
-                            if let Some(label) = entity["labels"]["en"]["value"].as_str() {
-                                property_labels.insert(property_id.clone(), label.to_string());
-                            }
-                        }
-                    }
-                }
-            }
-            Err(err) => log!("Error fetching property labels: {:?}", err),
+                log!("Received response from Wikidata. Status: {}", response.status());
+                if response.status() != 200 {
+                    log!("Error: Unexpected status code {}", response.status());
+                    return HashMap::new();
                 }
 
-        property_labels
+                match response.text().await {
+                    Ok(text) => {
+                        log!("Response body: {}", text);
+                        match serde_json::from_str::<serde_json::Value>(&text) {
+                            Ok(data) => {
+                                log!("Successfully parsed response from Wikidata");
+                                let mut result = HashMap::new();
+                                if let Some(bindings) = data["results"]["bindings"].as_array() {
+                                    log!("Found {} bindings in response", bindings.len());
+                                    for (i, binding) in bindings.iter().enumerate() {
+                                        if let (Some(prop), Some(label)) = (
+                                            binding["prop"]["value"].as_str(),
+                                            binding["propLabel"]["value"].as_str()
+                                        ) {
+                                            let prop_id = prop.split('/').last().unwrap_or("").to_string();
+                                            result.insert(prop_id.clone(), label.to_string());
+                                            log!("Processed binding {}: prop_id = {}, label = {}", i, prop_id, label);
+                                        } else {
+                                            log!("Warning: Binding {} is missing prop or propLabel", i);
+                                        }
+                                    }
+                                } else {
+                                    log!("Warning: No bindings found in the response");
+                                }
+                                log!("Fetched {} property labels", result.len());
+                                result
+                            }
+                            Err(e) => {
+                                log!("Error parsing response from Wikidata: {:?}", e);
+                                HashMap::new()
+                            }
+                        }
+                    }
+                    Err(e) => {
+                        log!("Error reading response body: {:?}", e);
+                        HashMap::new()
+                    }
+                }
+            }
+            Err(e) => {
+                log!("Error fetching property labels from Wikidata: {:?}", e);
+                HashMap::new()
+            }
+        }
     }
 
     // Add a new custom property
     let add_property = move |property: String| {
+        // Normalize the property ID
+        let normalized_property = property.replace("http://www.wikidata.org/prop/", "");
+
         set_custom_properties.update(|props| {
-            if !props.contains(&property) && !property.is_empty() {
-                props.push(property.clone());
+            if !props.contains(&normalized_property) && !normalized_property.is_empty() {
+                props.push(normalized_property.clone());
 
                 //update the selected_properties state when a new property is added
                 set_selected_properties.update(|selected| {
-                    selected.insert(property.clone(), true);
+                    selected.insert(normalized_property.clone(), true);
                 });
 
                 // Ensure the grid updates reactively
                 set_items.update(|items| {
                     for item in items {
-                        item.custom_properties.entry(property.clone()).or_insert_with(|| "".to_string());
+                        item.custom_properties.entry(normalized_property.clone()).or_insert_with(|| "".to_string());
 
                         // Save the updated item to the database
                         let item_clone = item.clone();
@@ -461,12 +461,14 @@ pub fn ItemsList(
                 });
 
                 // Fetch the property label
-                let property_id = property.clone();
+                let property_id = normalized_property.clone();
                 spawn_local(async move {
                     let labels = fetch_property_labels(vec![property_id.clone()]).await;
+                    log!("Fetched labels: {:?}", labels);
                     set_property_labels.update(|labels_map| {
-                        if let Some(label) = labels.get(&property_id) {
-                            labels_map.insert(property_id, label.clone());
+                        for (key, value) in labels {
+                            log!("Inserting label: {} -> {}", key, value);
+                            labels_map.insert(key, value);
                         }
                     });
                 });
@@ -481,8 +483,16 @@ pub fn ItemsList(
             let set_property_labels = set_property_labels.clone();
             let property_clone = property.clone();
             spawn_local(async move {
-                let properties = fetch_item_properties(&wikidata_id, set_fetched_properties, set_property_labels).await;
-                // log!("Fetched properties for Wikidata ID {}: {:?}", wikidata_id, properties);
+                let properties = fetch_item_properties(&wikidata_id).await;
+                // Update fetched properties and property labels
+                set_fetched_properties.update(|fp| {
+                    fp.insert(wikidata_id.clone(), properties.clone());
+                });
+                set_property_labels.update(|pl| {
+                    for (key, value) in properties.iter() {
+                        pl.entry(key.clone()).or_insert_with(|| value.clone());
+                    }
+                });
                 if let Some(value) = properties.get(&property_clone) {
                     set_items.update(|items| {
                         if let Some(item) = items.iter_mut().find(|item| item.wikidata_id.as_ref().unwrap() == &wikidata_id) {
@@ -512,7 +522,7 @@ pub fn ItemsList(
             let set_fetched_properties = set_fetched_properties.clone();
             let set_property_labels = set_property_labels.clone();
             spawn_local(async move {
-                let properties = fetch_item_properties(&wikidata_id, set_fetched_properties, set_property_labels).await;
+                let properties = fetch_item_properties(&wikidata_id).await;
                 log!("Fetched properties for index {}: {:?}", index, properties);
             });
         }
@@ -654,7 +664,7 @@ pub fn ItemsList(
             let set_fetched_properties = set_fetched_properties.clone();
             let set_property_labels = set_property_labels.clone();
             spawn_local(async move {
-                let properties = fetch_item_properties(&wikidata_id, set_fetched_properties, set_property_labels).await;
+                let properties = fetch_item_properties(&wikidata_id).await;
                 // log!("Fetched properties for Wikidata ID {}: {:?}", wikidata_id, properties);
 
                 // Populate the custom properties for the new item
@@ -718,11 +728,11 @@ pub fn ItemsList(
                 // Dynamically adding custom properties as columns
                 {move || {
                     let custom_props = custom_properties.get().clone();
-                    log!("Rendering custom properties: {:?}", custom_props);
                     custom_props.into_iter().map(move |property| {
-                        let property_clone = property.clone();
-                        let property_label = property_labels.get().get(&property_clone).cloned().unwrap_or_else(|| property_clone.clone());
-                        let property_clone_for_button = property_clone.clone();
+                        let normalized_property = property.replace("http://www.wikidata.org/prop/", "");
+                        let property_label = property_labels.get().get(&normalized_property).cloned().unwrap_or_else(|| normalized_property.clone());
+                        log!("Rendering property: {} -> {}", normalized_property, property_label);
+                        let property_clone_for_button = normalized_property.clone();
                         view! {
                             <tr>
                                 <td>
@@ -744,7 +754,7 @@ pub fn ItemsList(
                                     }>{ "Delete" }</button>
                                 </td>
                                 {move || {
-                                    let property_clone_for_cells = property_clone.clone();
+                                    let property_clone_for_cells = normalized_property.clone();
                                     items.get().iter().enumerate().map(move |(index, item)| {
                                         let property_clone_for_closure = property_clone_for_cells.clone();
                                         view! {
@@ -756,10 +766,8 @@ pub fn ItemsList(
                                                 focused_cell=focused_cell
                                                 set_focused_cell=set_focused_cell.clone()
                                                 on_focus=Some(Callback::new(move |_| {
-                                                    log!("Custom property input focused");
                                                 }))
                                                 on_blur=Some(Callback::new(move |_| {
-                                                    log!("Custom property input blurred");
                                                 }))
                                                 input_type=InputType::TextArea
                                             />
@@ -792,13 +800,11 @@ pub fn ItemsList(
                                 } />
                                 <datalist id="properties">
                                     {move || {
-                                        let properties = fetched_properties.get().clone();
                                         let property_labels = property_labels.get().clone();
-                                        properties.into_iter().map(|(key, _)| {
-                                            let key_clone = key.clone();
-                                            let label = property_labels.get(&key_clone).cloned().unwrap_or_else(|| key_clone.clone());
+                                        property_labels.into_iter().map(|(property, label)| {
+                                            let property_clone = property.clone();
                                             view! {
-                                                <option value={format!("{} - {}", key, label)}>{ format!("{} - {}", key, label) }</option>
+                                                <option value={property}>{ format!("{} - {}", property_clone, label) }</option>
                                             }
                                         }).collect::<Vec<_>>()
                                     }}
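A short sketch of the data shapes after this change: the fetched_properties cache is now keyed by Wikidata entity ID (one inner map of property ID to value label per item), and property IDs are normalized by stripping the http://www.wikidata.org/prop/ prefix before being used as keys or rendered. The IDs Q42 and P31 and the value "human" are placeholders.

// Sketch only: illustrates the new cache layout and the ID normalization used above.
use std::collections::HashMap;

fn main() {
    // Outer key = Wikidata entity ID, inner map = property ID -> value label.
    let mut fetched_properties: HashMap<String, HashMap<String, String>> = HashMap::new();

    let mut properties = HashMap::new();
    properties.insert("P31".to_string(), "human".to_string());
    fetched_properties.insert("Q42".to_string(), properties);

    // Property IDs are normalized the same way the component does it.
    let raw = "http://www.wikidata.org/prop/P31";
    let normalized = raw.replace("http://www.wikidata.org/prop/", "");
    assert_eq!(normalized, "P31");
    assert!(fetched_properties["Q42"].contains_key(&normalized));
}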