Use an additional trait and some cfgs to refactor the thread away

As wasm doesn't support threads and we were using a background
processing thread, we had to refactor that away. The solution is
mediocre (for now): it introduces a separate `DatabaseQuery` trait
that is trait-object compatible and is used to make synchronous
calls.
Branch: pull/8/head
Author: Benedikt Terhechte
Parent: 73043bd655
Commit: 8ee485f7a7
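The refactoring hinges on object safety: the new `DatabaseQuery` trait has only `&self` methods and no `Clone` or constructor requirements, so it can live behind a `Box<dyn DatabaseQuery>` and be called synchronously on wasm, while the full `DatabaseLike` trait cannot be made into a trait object. A minimal, self-contained sketch of that idea (the `Row` and `MemoryDb` types below are stand-ins for illustration, not the crate's real API):

```rust
use std::path::Path;

// Stand-in for the crate's `QueryResult`.
type Row = String;

// Object-safe: only `&self` methods, no `Clone`, no constructor.
trait DatabaseQuery {
    fn query(&self, term: &str) -> Vec<Row>;
}

// Not object-safe: the `Clone` supertrait and the `Self: Sized` constructor
// rule it out, which is fine because only concrete types use this part.
trait DatabaseLike: DatabaseQuery + Clone {
    fn new(path: &Path) -> Self
    where
        Self: Sized;
}

#[derive(Clone)]
struct MemoryDb {
    rows: Vec<Row>,
}

impl DatabaseQuery for MemoryDb {
    fn query(&self, term: &str) -> Vec<Row> {
        self.rows.iter().filter(|r| r.contains(term)).cloned().collect()
    }
}

impl DatabaseLike for MemoryDb {
    fn new(_path: &Path) -> Self {
        MemoryDb {
            rows: vec!["apple".into(), "apricot".into(), "pear".into()],
        }
    }
}

fn main() {
    // The synchronous (wasm) path can hold the database as a trait object
    // and query it on the calling thread, without channels or a worker.
    let db: Box<dyn DatabaseQuery> = Box::new(MemoryDb::new(Path::new("unused")));
    println!("{:?}", db.query("ap"));
    // A `Box<dyn DatabaseLike>` would not compile: that trait is not object safe.
}
```

The actual traits in the diff below follow the same shape, with `Query` and `QueryResult` in place of the stand-ins.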

@@ -8,7 +8,11 @@ use crate::Config;
 use super::{db_message::DBMessage, query::Query, query_result::QueryResult};
 
-pub trait DatabaseLike: Clone + Send + 'static {
+pub trait DatabaseQuery: Send + 'static {
+    fn query(&self, query: &Query) -> Result<Vec<QueryResult>>;
+}
+
+pub trait DatabaseLike: DatabaseQuery + Clone {
     fn new(path: impl AsRef<Path>) -> Result<Self>
     where
         Self: Sized;
@@ -16,7 +20,6 @@ pub trait DatabaseLike: Clone + Send + 'static {
     where
         Self: Sized;
     fn total_mails(&self) -> Result<usize>;
-    fn query(&self, query: &Query) -> Result<Vec<QueryResult>>;
     fn import(self) -> (Sender<DBMessage>, JoinHandle<Result<usize>>);
     fn save_config(&self, config: Config) -> Result<()>;
 }

@@ -4,7 +4,7 @@ pub mod message_adapter;
 pub mod model;
 mod types;
 
-pub use database::database_like::DatabaseLike;
+pub use database::database_like::{DatabaseLike, DatabaseQuery};
 pub use database::db_message::DBMessage;
 pub use database::query::{Field, Filter, OtherQuery, Query, ValueField, AMOUNT_FIELD_NAME};
 pub use database::query_result::{QueryResult, QueryRow};

@@ -5,6 +5,7 @@
 //! - [`segmentations::`]
 //! - [`items::`]
 
 use eyre::{bail, Result};
 use lru::LruCache;
 
 use crate::database::query::{Field, Filter, OtherQuery, Query, ValueField};
@@ -49,7 +50,12 @@ pub struct Engine {
 impl Engine {
     pub fn new<Database: DatabaseLike + 'static>(config: &Config) -> Result<Self> {
+        #[cfg(not(target_arch = "wasm32"))]
         let link = super::link::run::<_, Database>(config)?;
+        #[cfg(target_arch = "wasm32")]
+        let link = super::link::run::<_, Database>(config, Database::new(&config.database_path)?)?;
         let engine = Engine {
             link,
             search_stack: Vec::new(),
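The two cfg-gated `let link = …` statements above are mutually exclusive: `#[cfg]` applies to the whole statement, so exactly one of them is compiled for any given target. A tiny, generic illustration of the mechanism (nothing here comes from the crate):

```rust
fn main() {
    // Only one of these two statements exists in the compiled binary;
    // the other is stripped during cfg expansion and never type-checked.
    #[cfg(not(target_arch = "wasm32"))]
    let backend = "threaded link (channels + worker thread)";
    #[cfg(target_arch = "wasm32")]
    let backend = "synchronous link (direct database calls)";

    println!("using {backend}");
}
```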

@@ -11,12 +11,13 @@ use std::sync::{
 };
 use std::{collections::HashSet, convert::TryInto};
 
+#[allow(unused)]
 use crossbeam_channel::{unbounded, Receiver, Sender};
 use eyre::Result;
 use serde_json::Value;
 
 use crate::database::{
-    database_like::DatabaseLike,
+    database_like::DatabaseQuery,
     query::Query,
     query_result::{QueryResult, QueryRow},
 };
@@ -35,8 +36,14 @@ pub enum Response<Context: Send + 'static> {
 pub(super) type InputSender<Context> = Sender<(Query, Context)>;
 pub(super) type OutputReciever<Context> = Receiver<Result<Response<Context>>>;
 
+// FIXME: Instead of this wasm mess, two different link types?
 pub(super) struct Link<Context: Send + 'static> {
+    database: Box<dyn DatabaseQuery>,
+    #[cfg(not(target_arch = "wasm32"))]
     pub input_sender: InputSender<Context>,
+    #[cfg(not(target_arch = "wasm32"))]
     pub output_receiver: OutputReciever<Context>,
     // We need to account for the brief moment where the processing channel is empty
     // but we're applying the results. If there is a UI update in this window,
@@ -46,8 +53,47 @@ pub(super) struct Link<Context: Send + 'static> {
     // put into the output channel. In order to account for all of this, we employ a
     // request counter to know how many requests are currently in the pipeline
     request_counter: Arc<AtomicUsize>,
+    #[cfg(target_arch = "wasm32")]
+    response: Vec<Response<Context>>,
+}
+
+#[cfg(target_arch = "wasm32")]
+impl<Context: Send + Sync + 'static> Link<Context> {
+    pub fn request(&mut self, query: &Query, context: Context) -> Result<()> {
+        let result = self.database.query(&query)?;
+        if let Some(response) = process_query(query.clone(), result, context).ok() {
+            self.response.insert(0, response);
+        }
+        Ok(())
+    }
+
+    pub fn receive(&mut self) -> Result<Option<Response<Context>>> {
+        Ok(self.response.pop())
+    }
+
+    pub fn is_processing(&self) -> bool {
+        self.request_counter.load(Ordering::Relaxed) > 0
+    }
+
+    pub fn request_counter(&self) -> Arc<AtomicUsize> {
+        self.request_counter.clone()
+    }
+}
+
+#[cfg(target_arch = "wasm32")]
+pub(super) fn run<Context: Send + Sync + 'static, Database: DatabaseQuery>(
+    config: &Config,
+    database: Database,
+) -> Result<Link<Context>> {
+    Ok(Link {
+        database: Box::new(database),
+        request_counter: Arc::new(AtomicUsize::new(0)),
+        response: Vec::new(),
+    })
 }
 
+#[cfg(not(target_arch = "wasm32"))]
 impl<Context: Send + Sync + 'static> Link<Context> {
     pub fn request(&mut self, query: &Query, context: Context) -> Result<()> {
         self.request_counter.fetch_add(1, Ordering::Relaxed);
@@ -81,7 +127,8 @@ impl<Context: Send + Sync + 'static> Link<Context> {
     }
 }
 
-pub(super) fn run<Context: Send + Sync + 'static, Database: DatabaseLike + 'static>(
+#[cfg(not(target_arch = "wasm32"))]
+pub(super) fn run<Context: Send + Sync + 'static, Database: DatabaseQuery>(
     config: &Config,
 ) -> Result<Link<Context>> {
     // Create a new database connection, just for reading
@@ -96,7 +143,8 @@ pub(super) fn run<Context: Send + Sync + 'static, Database: DatabaseLike + 'static>(
     })
 }
 
-fn inner_loop<Context: Send + Sync + 'static, Database: DatabaseLike>(
+#[cfg(not(target_arch = "wasm32"))]
+fn inner_loop<Context: Send + Sync + 'static, Database: DatabaseQuery>(
     database: Database,
     input_receiver: Receiver<(Query, Context)>,
     output_sender: Sender<Result<Response<Context>>>,
@@ -104,39 +152,54 @@ fn inner_loop<Context: Send + Sync + 'static, Database: DatabaseLike>(
     loop {
         let (query, context) = input_receiver.recv()?;
         let result = database.query(&query)?;
-        let response = match query {
-            Query::Grouped { .. } => {
-                let segmentations = calculate_segmentations(&result)?;
-                Response::Grouped(query, context, segmentations)
-            }
-            Query::Normal { .. } => {
-                let converted = calculate_rows(&result)?;
-                Response::Normal(query, context, converted)
-            }
-            Query::Other { .. } => {
-                let mut results = HashSet::new();
-                for entry in result {
-                    match entry {
-                        QueryResult::Other(field) => match field.value() {
-                            Value::Array(s) => {
-                                for n in s {
-                                    if let Value::String(s) = n {
-                                        if !results.contains(s) {
-                                            results.insert(s.to_owned());
-                                        }
-                                    }
-                                }
-                            }
-                            _ => panic!("Should not end up here"),
-                        },
-                        _ => panic!("Should not end up here"),
-                    }
-                }
-                Response::Other(query, context, results.into_iter().collect())
-            }
-        };
-        output_sender.send(Ok(response))?;
-    }
+        let response = process_query(query, result, context);
+        output_sender.send(response)?;
+    }
+}
+
+fn process_query<Context: Send + Sync + 'static>(
+    query: Query,
+    result: Vec<QueryResult>,
+    context: Context,
+) -> Result<Response<Context>> {
+    let response = match query {
+        Query::Grouped { .. } => {
+            let segmentations = calculate_segmentations(&result)?;
+            Response::Grouped(query, context, segmentations)
+        }
+        Query::Normal { .. } => {
+            let converted = calculate_rows(&result)?;
+            Response::Normal(query, context, converted)
+        }
+        Query::Other { .. } => {
+            let mut results = HashSet::new();
+            for entry in result {
+                match entry {
+                    QueryResult::Other(field) => match field.value() {
+                        Value::Array(s) => {
+                            for n in s {
+                                if let Value::String(s) = n {
+                                    if !results.contains(s) {
+                                        results.insert(s.to_owned());
+                                    }
+                                }
+                            }
+                        }
+                        _ => {
+                            #[cfg(debug_assertions)]
+                            panic!("Should not end up here")
+                        }
+                    },
+                    _ => {
+                        #[cfg(debug_assertions)]
+                        panic!("Should not end up here")
+                    }
+                }
+            }
+            Response::Other(query, context, results.into_iter().collect())
+        }
+    };
+    Ok(response)
 }
 
 fn calculate_segmentations(result: &[QueryResult]) -> Result<Segmentation> {
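Taken together, the wasm path replaces the channel/worker pipeline with "run the query now, queue the response in a `Vec`, hand it back on the next `receive`". A self-contained sketch of that pattern with stand-in types (not the crate's real `Link`, `Query`, or `Response`):

```rust
// Sketch of the synchronous-link pattern used on wasm32, with stand-in
// types: `request` executes immediately on the caller's thread and stores
// the result; `receive` hands results back in FIFO order.
trait DatabaseQuery {
    fn query(&self, term: &str) -> Vec<String>;
}

struct StubDb;

impl DatabaseQuery for StubDb {
    fn query(&self, term: &str) -> Vec<String> {
        vec![format!("result for {term}")]
    }
}

struct SyncLink {
    database: Box<dyn DatabaseQuery>,
    responses: Vec<Vec<String>>,
}

impl SyncLink {
    fn request(&mut self, term: &str) {
        // No channel and no worker thread: query synchronously.
        let result = self.database.query(term);
        // Insert at the front so `pop` returns the oldest response first,
        // mimicking the ordering a channel would provide.
        self.responses.insert(0, result);
    }

    fn receive(&mut self) -> Option<Vec<String>> {
        self.responses.pop()
    }
}

fn main() {
    let mut link = SyncLink {
        database: Box::new(StubDb),
        responses: Vec::new(),
    };
    link.request("subject:hello");
    link.request("subject:world");
    assert_eq!(link.receive(), Some(vec!["result for subject:hello".to_string()]));
    assert_eq!(link.receive(), Some(vec!["result for subject:world".to_string()]));
}
```

In the commit itself the wasm `Link` keeps the same method names and the `request_counter` field as the threaded version, presumably so calling code doesn't have to care which variant it got.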
