fix(item_list): move Wikidata fetching to the backend to bypass CORS (Cross-Origin Resource Sharing) restrictions
This commit is contained in:
parent
8fed7eeafe
commit
7c620d1642
4 changed files with 116 additions and 36 deletions
1
src/api/mod.rs
Normal file
1
src/api/mod.rs
Normal file
|
@ -0,0 +1 @@
|
|||
/// HTTP helpers for fetching entity data and property labels from wikidata.org.
pub mod wikidata;
|
104
src/api/wikidata.rs
Normal file
104
src/api/wikidata.rs
Normal file
|
@ -0,0 +1,104 @@
|
|||
use leptos::*;
|
||||
use gloo_net::http::Request;
|
||||
use serde_json::Value;
|
||||
use std::collections::HashMap;
|
||||
use leptos::logging::log;
|
||||
|
||||
#[server(FetchWikidataProperties, "/api/fetch_wikidata_properties")]
|
||||
pub async fn fetch_wikidata_properties(wikidata_id: String) -> Result<HashMap<String, String>, ServerFnError> {
|
||||
let url = format!(
|
||||
"https://www.wikidata.org/wiki/Special:EntityData/{}.json",
|
||||
wikidata_id
|
||||
);
|
||||
|
||||
log!("Fetching properties from Wikidata: {}", url);
|
||||
|
||||
// Send the HTTP request
|
||||
let response = match Request::get(&url).send().await {
|
||||
Ok(response) => response,
|
||||
Err(err) => {
|
||||
log!("Error fetching properties: {:?}", err);
|
||||
return Err(ServerFnError::ServerError("Failed to fetch properties".to_string()));
|
||||
}
|
||||
};
|
||||
|
||||
// Check the response status code
|
||||
if response.status() != 200 {
|
||||
log!("Error fetching properties: {}", response.status());
|
||||
return Err(ServerFnError::ServerError("Failed to fetch properties".to_string()));
|
||||
}
|
||||
|
||||
// Parse the response as JSON
|
||||
let data: Value = match response.json().await {
|
||||
Ok(data) => data,
|
||||
Err(err) => {
|
||||
log!("Error parsing properties response: {:?}", err);
|
||||
return Err(ServerFnError::ServerError("Failed to parse properties response".to_string()));
|
||||
}
|
||||
};
|
||||
|
||||
log!("Fetched properties: {:?}", data);
|
||||
|
||||
let mut result = HashMap::new();
|
||||
if let Some(entities) = data["entities"].as_object() {
|
||||
if let Some(entity) = entities.get(&wikidata_id) {
|
||||
if let Some(claims) = entity["claims"].as_object() {
|
||||
for (property, values) in claims {
|
||||
if let Some(value) = values[0]["mainsnak"]["datavalue"]["value"].as_str() {
|
||||
result.insert(property.clone(), value.to_string());
|
||||
} else if let Some(value) = values[0]["mainsnak"]["datavalue"]["value"].as_object() {
|
||||
result.insert(property.clone(), serde_json::to_string(value).unwrap());
|
||||
} else if let Some(value) = values[0]["mainsnak"]["datavalue"]["value"].as_f64() {
|
||||
result.insert(property.clone(), value.to_string());
|
||||
} else {
|
||||
result.insert(property.clone(), "Unsupported data type".to_string());
|
||||
}
|
||||
}
|
||||
} else {
|
||||
log!("No claims found for Wikidata ID: {}", wikidata_id);
|
||||
}
|
||||
} else {
|
||||
log!("Entity not found for Wikidata ID: {}", wikidata_id);
|
||||
}
|
||||
} else {
|
||||
log!("No entities found in the response");
|
||||
}
|
||||
|
||||
// Fetch labels for each property
|
||||
let property_ids: Vec<String> = result.keys().cloned().collect();
|
||||
let property_labels_url = format!(
|
||||
"https://www.wikidata.org/w/api.php?action=wbgetentities&ids={}&props=labels&languages=en&format=json",
|
||||
property_ids.join("|")
|
||||
);
|
||||
|
||||
// Send the HTTP request for property labels
|
||||
let labels_response = match Request::get(&property_labels_url).send().await {
|
||||
Ok(response) => response,
|
||||
Err(err) => {
|
||||
log!("Error fetching property labels: {:?}", err);
|
||||
return Err(ServerFnError::ServerError("Failed to fetch property labels".to_string()));
|
||||
}
|
||||
};
|
||||
|
||||
// Parse the response as JSON
|
||||
let labels_data: Value = match labels_response.json().await {
|
||||
Ok(data) => data,
|
||||
Err(err) => {
|
||||
log!("Error parsing property labels response: {:?}", err);
|
||||
return Err(ServerFnError::ServerError("Failed to parse property labels response".to_string()));
|
||||
}
|
||||
};
|
||||
|
||||
let mut new_result = HashMap::new();
|
||||
if let Some(entities) = labels_data["entities"].as_object() {
|
||||
for (id, entity) in entities {
|
||||
if let Some(label) = entity["labels"]["en"]["value"].as_str() {
|
||||
if let Some(value) = result.get(id) {
|
||||
new_result.insert(label.to_string(), value.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
log!("Fetched properties: {:?}", result); // Log the fetched properties
|
||||
Ok(new_result)
|
||||
}
|
|
@ -1,5 +1,6 @@
|
|||
use crate::components::editable_cell::EditableCell;
|
||||
use crate::components::editable_cell::InputType;
|
||||
use crate::api::wikidata::fetch_wikidata_properties;
|
||||
use leptos::*;
|
||||
use serde::Deserialize;
|
||||
use uuid::Uuid;
|
||||
|
@ -79,46 +80,19 @@ pub fn ItemsList(
|
|||
};
|
||||
|
||||
//function to fetch properties
|
||||
async fn fetch_item_properties(wikidata_id: &str, set_fetched_properties: WriteSignal<HashMap<String, String>>) -> HashMap<String, String> {
|
||||
let url = format!(
|
||||
"https://www.wikidata.org/wiki/Special:EntityData/{}.json",
|
||||
wikidata_id
|
||||
);
|
||||
|
||||
match gloo_net::http::Request::get(&url).send().await {
|
||||
Ok(response) => {
|
||||
if let Ok(data) = response.json::<serde_json::Value>().await {
|
||||
if let Some(entities) = data["entities"].as_object() {
|
||||
if let Some(entity) = entities.get(wikidata_id) {
|
||||
if let Some(claims) = entity["claims"].as_object() {
|
||||
let mut result = HashMap::new();
|
||||
for (property, values) in claims {
|
||||
if let Some(value) = values[0]["mainsnak"]["datavalue"]["value"].as_str() {
|
||||
result.insert(property.clone(), value.to_string());
|
||||
} else if let Some(value) = values[0]["mainsnak"]["datavalue"]["value"].as_object() {
|
||||
result.insert(property.clone(), serde_json::to_string(value).unwrap());
|
||||
} else if let Some(value) = values[0]["mainsnak"]["datavalue"]["value"].as_f64() {
|
||||
result.insert(property.clone(), value.to_string());
|
||||
} else {
|
||||
result.insert(property.clone(), "Unsupported data type".to_string());
|
||||
}
|
||||
}
|
||||
set_fetched_properties.update(|properties| {
|
||||
for (key, val) in result.clone() {
|
||||
properties.insert(key.clone(), val.clone());
|
||||
async fn fetch_item_properties(wikidata_id: &str, set_fetched_properties: WriteSignal<HashMap<String, String>>) {
|
||||
match fetch_wikidata_properties(wikidata_id.to_string()).await {
|
||||
Ok(properties) => {
|
||||
log!("Fetched properties for Wikidata ID {}: {:?}",wikidata_id, properties);
|
||||
set_fetched_properties.update(|props| {
|
||||
for (key, val) in properties {
|
||||
props.insert(key, val);
|
||||
}
|
||||
});
|
||||
return result;
|
||||
}
|
||||
Err(err) => log!("Error fetching properties: {:?}", err),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(err) => log!("Error fetching item properties: {:?}", err),
|
||||
}
|
||||
|
||||
HashMap::new()
|
||||
}
|
||||
|
||||
|
||||
// Add a new custom property
|
||||
|
|
|
@ -2,6 +2,7 @@ pub mod app;
|
|||
pub mod components;
|
||||
pub mod models;
|
||||
pub mod nostr;
|
||||
pub mod api;
|
||||
|
||||
|
||||
#[cfg(feature = "hydrate")]
|
||||
|
|
Loading…
Add table
Reference in a new issue