First step to creating cargo workspaces, a core crate

pull/7/head
Benedikt Terhechte 2 years ago
parent d02ec78a75
commit 812d635bc5

core/Cargo.lock generated

@ -0,0 +1,761 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "adler"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
[[package]]
name = "ahash"
version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47"
dependencies = [
"getrandom",
"once_cell",
"version_check",
]
[[package]]
name = "aho-corasick"
version = "0.7.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f"
dependencies = [
"memchr",
]
[[package]]
name = "ansi_term"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2"
dependencies = [
"winapi",
]
[[package]]
name = "arrayvec"
version = "0.4.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cd9fd44efafa8690358b7408d253adf110036b88f55672a933f01d616ad9b1b9"
dependencies = [
"nodrop",
]
[[package]]
name = "autocfg"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a"
[[package]]
name = "bitflags"
version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "chrono"
version = "0.4.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "670ad68c9088c2a963aaa298cb369688cf3f9465ce5e2d4ca10e6e0098a1ce73"
dependencies = [
"libc",
"num-integer",
"num-traits",
"time",
"winapi",
]
[[package]]
name = "crc32fast"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "738c290dfaea84fc1ca15ad9c168d083b05a714e1efddd8edaab678dc28d2836"
dependencies = [
"cfg-if",
]
[[package]]
name = "crossbeam-channel"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06ed27e177f16d65f0f0c22a213e17c696ace5dd64b14258b52f9417ccb52db4"
dependencies = [
"cfg-if",
"crossbeam-utils",
]
[[package]]
name = "crossbeam-deque"
version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6455c0ca19f0d2fbf751b908d5c55c1f5cbc65e03c4225427254b46890bdde1e"
dependencies = [
"cfg-if",
"crossbeam-epoch",
"crossbeam-utils",
]
[[package]]
name = "crossbeam-epoch"
version = "0.9.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ec02e091aa634e2c3ada4a392989e7c3116673ef0ac5b72232439094d73b7fd"
dependencies = [
"cfg-if",
"crossbeam-utils",
"lazy_static",
"memoffset",
"scopeguard",
]
[[package]]
name = "crossbeam-utils"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d82cfc11ce7f2c3faef78d8a684447b40d503d9681acebed6cb728d45940c4db"
dependencies = [
"cfg-if",
"lazy_static",
]
[[package]]
name = "dirs-next"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1"
dependencies = [
"cfg-if",
"dirs-sys-next",
]
[[package]]
name = "dirs-sys-next"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d"
dependencies = [
"libc",
"redox_users",
"winapi",
]
[[package]]
name = "either"
version = "1.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457"
[[package]]
name = "eyre"
version = "0.6.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "221239d1d5ea86bf5d6f91c9d6bc3646ffe471b08ff9b0f91c44f115ac969d2b"
dependencies = [
"indenter",
"once_cell",
]
[[package]]
name = "flate2"
version = "1.0.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e6988e897c1c9c485f43b47a529cef42fde0547f9d8d41a7062518f1d8fc53f"
dependencies = [
"cfg-if",
"crc32fast",
"libc",
"miniz_oxide",
]
[[package]]
name = "getrandom"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7fcd999463524c52659517fe2cea98493cfe485d10565e7b0fb07dbba7ad2753"
dependencies = [
"cfg-if",
"libc",
"wasi",
]
[[package]]
name = "hashbrown"
version = "0.11.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e"
dependencies = [
"ahash",
]
[[package]]
name = "heck"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c"
dependencies = [
"unicode-segmentation",
]
[[package]]
name = "hermit-abi"
version = "0.1.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
dependencies = [
"libc",
]
[[package]]
name = "indenter"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683"
[[package]]
name = "itoa"
version = "0.4.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4"
[[package]]
name = "itoa"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1aab8fc367588b89dcee83ab0fd66b72b50b72fa1904d7095045ace2b0c81c35"
[[package]]
name = "lazy_static"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
version = "0.2.112"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b03d17f364a3a042d5e5d46b053bbbf82c92c9430c592dd4c064dc6ee997125"
[[package]]
name = "log"
version = "0.4.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "51b9bbe6c47d51fc3e1a9b945965946b4c44142ab8792c50835a980d362c2710"
dependencies = [
"cfg-if",
]
[[package]]
name = "lru"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6c748cfe47cb8da225c37595b3108bea1c198c84aaae8ea0ba76d01dda9fc803"
dependencies = [
"hashbrown",
]
[[package]]
name = "memchr"
version = "2.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a"
[[package]]
name = "memoffset"
version = "0.6.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce"
dependencies = [
"autocfg",
]
[[package]]
name = "miniz_oxide"
version = "0.4.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a92518e98c078586bc6c934028adcca4c92a53d6a958196de835170a01d84e4b"
dependencies = [
"adler",
"autocfg",
]
[[package]]
name = "nodrop"
version = "0.1.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72ef4a56884ca558e5ddb05a1d1e7e1bfd9a68d9ed024c21704cc98872dae1bb"
[[package]]
name = "num-format"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bafe4179722c2894288ee77a9f044f02811c86af699344c498b0840c698a2465"
dependencies = [
"arrayvec",
"itoa 0.4.8",
]
[[package]]
name = "num-integer"
version = "0.1.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d2cc698a63b549a70bc047073d2949cce27cd1c7b0a4a862d08a8031bc2801db"
dependencies = [
"autocfg",
"num-traits",
]
[[package]]
name = "num-traits"
version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290"
dependencies = [
"autocfg",
]
[[package]]
name = "num_cpus"
version = "1.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3"
dependencies = [
"hermit-abi",
"libc",
]
[[package]]
name = "once_cell"
version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da32515d9f6e6e489d7bc9d84c71b060db7247dc035bbe44eac88cf87486d8d5"
[[package]]
name = "pin-project-lite"
version = "0.2.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8d31d11c69a6b52a174b42bdc0c30e5e11670f90788b2c471c31c1d17d449443"
[[package]]
name = "postsack-core"
version = "0.2.0"
dependencies = [
"chrono",
"crossbeam-channel",
"eyre",
"flate2",
"lru",
"num-format",
"once_cell",
"rand",
"rayon",
"regex",
"rsql_builder",
"serde",
"serde_json",
"shellexpand",
"strum",
"strum_macros",
"thiserror",
"tracing",
"tracing-subscriber",
"treemap",
]
[[package]]
name = "ppv-lite86"
version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed0cfbc8191465bed66e1718596ee0b0b35d5ee1f41c5df2189d0fe8bde535ba"
[[package]]
name = "proc-macro2"
version = "1.0.34"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2f84e92c0f7c9d58328b85a78557813e4bd845130db68d7184635344399423b1"
dependencies = [
"unicode-xid",
]
[[package]]
name = "quote"
version = "1.0.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "38bc8cc6a5f2e3655e0899c1b848643b2562f853f114bfec7be120678e3ace05"
dependencies = [
"proc-macro2",
]
[[package]]
name = "rand"
version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2e7573632e6454cf6b99d7aac4ccca54be06da05aca2ef7423d22d27d4d4bcd8"
dependencies = [
"libc",
"rand_chacha",
"rand_core",
"rand_hc",
]
[[package]]
name = "rand_chacha"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
dependencies = [
"ppv-lite86",
"rand_core",
]
[[package]]
name = "rand_core"
version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7"
dependencies = [
"getrandom",
]
[[package]]
name = "rand_hc"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d51e9f596de227fda2ea6c84607f5558e196eeaf43c986b724ba4fb8fdf497e7"
dependencies = [
"rand_core",
]
[[package]]
name = "rayon"
version = "1.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c06aca804d41dbc8ba42dfd964f0d01334eceb64314b9ecf7c5fad5188a06d90"
dependencies = [
"autocfg",
"crossbeam-deque",
"either",
"rayon-core",
]
[[package]]
name = "rayon-core"
version = "1.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d78120e2c850279833f1dd3582f730c4ab53ed95aeaaaa862a2a5c71b1656d8e"
dependencies = [
"crossbeam-channel",
"crossbeam-deque",
"crossbeam-utils",
"lazy_static",
"num_cpus",
]
[[package]]
name = "redox_syscall"
version = "0.2.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8383f39639269cde97d255a32bdb68c047337295414940c68bdd30c2e13203ff"
dependencies = [
"bitflags",
]
[[package]]
name = "redox_users"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "528532f3d801c87aec9def2add9ca802fe569e44a544afe633765267840abe64"
dependencies = [
"getrandom",
"redox_syscall",
]
[[package]]
name = "regex"
version = "1.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d07a8629359eb56f1e2fb1652bb04212c072a87ba68546a04065d525673ac461"
dependencies = [
"aho-corasick",
"memchr",
"regex-syntax",
]
[[package]]
name = "regex-syntax"
version = "0.6.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b"
[[package]]
name = "rsql_builder"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9dbd5712883cef396d13516bb52b300fd97a29d52ca20361f0a4905bd38a2355"
dependencies = [
"serde",
"serde_json",
]
[[package]]
name = "rustversion"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f2cc38e8fa666e2de3c4aba7edeb5ffc5246c1c2ed0e3d17e560aeeba736b23f"
[[package]]
name = "ryu"
version = "1.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "73b4b750c782965c211b42f022f59af1fbceabdd026623714f104152f1ec149f"
[[package]]
name = "scopeguard"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
[[package]]
name = "serde"
version = "1.0.131"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b4ad69dfbd3e45369132cc64e6748c2d65cdfb001a2b1c232d128b4ad60561c1"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.131"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b710a83c4e0dff6a3d511946b95274ad9ca9e5d3ae497b63fda866ac955358d2"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "serde_json"
version = "1.0.73"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bcbd0344bc6533bc7ec56df11d42fb70f1b912351c0825ccb7211b59d8af7cf5"
dependencies = [
"itoa 1.0.1",
"ryu",
"serde",
]
[[package]]
name = "sharded-slab"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31"
dependencies = [
"lazy_static",
]
[[package]]
name = "shellexpand"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "83bdb7831b2d85ddf4a7b148aa19d0587eddbe8671a436b7bd1182eaad0f2829"
dependencies = [
"dirs-next",
]
[[package]]
name = "smallvec"
version = "1.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1ecab6c735a6bb4139c0caafd0cc3635748bbb3acf4550e8138122099251f309"
[[package]]
name = "strum"
version = "0.23.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cae14b91c7d11c9a851d3fbc80a963198998c2a64eec840477fa92d8ce9b70bb"
[[package]]
name = "strum_macros"
version = "0.23.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5bb0dc7ee9c15cea6199cde9a127fa16a4c5819af85395457ad72d68edc85a38"
dependencies = [
"heck",
"proc-macro2",
"quote",
"rustversion",
"syn",
]
[[package]]
name = "syn"
version = "1.0.82"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8daf5dd0bb60cbd4137b1b587d2fc0ae729bc07cf01cd70b36a1ed5ade3b9d59"
dependencies = [
"proc-macro2",
"quote",
"unicode-xid",
]
[[package]]
name = "thiserror"
version = "1.0.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "854babe52e4df1653706b98fcfc05843010039b406875930a70e4d9644e5c417"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
version = "1.0.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aa32fd3f627f367fe16f893e2597ae3c05020f8bba2666a4e6ea73d377e5714b"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "thread_local"
version = "1.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8018d24e04c95ac8790716a5987d0fec4f8b27249ffa0f7d33f1369bdfb88cbd"
dependencies = [
"once_cell",
]
[[package]]
name = "time"
version = "0.1.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6db9e6914ab8b1ae1c260a4ae7a49b6c5611b40328a735b21862567685e73255"
dependencies = [
"libc",
"wasi",
"winapi",
]
[[package]]
name = "tracing"
version = "0.1.29"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "375a639232caf30edfc78e8d89b2d4c375515393e7af7e16f01cd96917fb2105"
dependencies = [
"cfg-if",
"pin-project-lite",
"tracing-attributes",
"tracing-core",
]
[[package]]
name = "tracing-attributes"
version = "0.1.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f4f480b8f81512e825f337ad51e94c1eb5d3bbdf2b363dcd01e2b19a9ffe3f8e"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "tracing-core"
version = "0.1.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1f4ed65637b8390770814083d20756f87bfa2c21bf2f110babdc5438351746e4"
dependencies = [
"lazy_static",
]
[[package]]
name = "tracing-log"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a6923477a48e41c1951f1999ef8bb5a3023eb723ceadafe78ffb65dc366761e3"
dependencies = [
"lazy_static",
"log",
"tracing-core",
]
[[package]]
name = "tracing-subscriber"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "245da694cc7fc4729f3f418b304cb57789f1bed2a78c575407ab8a23f53cb4d3"
dependencies = [
"ansi_term",
"sharded-slab",
"smallvec",
"thread_local",
"tracing-core",
"tracing-log",
]
[[package]]
name = "treemap"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1571f89da27a5e1aa83304ee1ab9519ea8c6432b4c8903aaaa6c9a9eecb6f36"
[[package]]
name = "unicode-segmentation"
version = "1.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8895849a949e7845e06bd6dc1aa51731a103c42707010a5b591c0038fb73385b"
[[package]]
name = "unicode-xid"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3"
[[package]]
name = "version_check"
version = "0.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5fecdca9a5291cc2b8dcf7dc02453fee791a280f3743cb0905f8822ae463b3fe"
[[package]]
name = "wasi"
version = "0.10.0+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f"
[[package]]
name = "winapi"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
dependencies = [
"winapi-i686-pc-windows-gnu",
"winapi-x86_64-pc-windows-gnu",
]
[[package]]
name = "winapi-i686-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"

@ -0,0 +1,30 @@
[package]
name = "postsack-core"
version = "0.2.0"
edition = "2021"
description = "Provides a high level visual overview of swaths of email"
[dependencies]
eyre = "0.6.5"
thiserror = "1.0.29"
tracing = "0.1.29"
tracing-subscriber = "0.3.0"
regex = "1.5.3"
flate2 = "1.0.22"
once_cell = "1.8.0"
rayon = "1.5.1"
chrono = "0.4.19"
serde_json = "1.0.70"
serde = { version = "1.0.130", features = ["derive"]}
crossbeam-channel = "0.5.1"
rsql_builder = "0.1.2"
treemap = "0.3.2"
num-format = "0.4.0"
strum = "0.23.0"
strum_macros = "0.23.0"
lru = { version = "0.7.0", optional = true }
rand = "0.8.4"
shellexpand = "2.1.0"
[features]
default = ["lru"]

@ -0,0 +1,12 @@
use eyre::Result;
use std::path::Path;
use super::{query::Query, query_result::QueryResult};
pub trait DatabaseLike: Send + Sync {
fn new(path: impl AsRef<Path>) -> Result<Self>
where
Self: Sized;
fn total_mails(&self) -> Result<usize>;
fn query(&self, query: &Query) -> Result<Vec<QueryResult>>;
}
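For illustration, a minimal in-memory stub satisfying this trait could look like the following sketch. `StubDatabase` is hypothetical and not part of this commit; the module paths assume the sketch lives inside the crate.

use eyre::Result;
use std::path::Path;

use crate::database::{database_like::DatabaseLike, query::Query, query_result::QueryResult};

/// Hypothetical stand-in, used only to illustrate the trait surface.
pub struct StubDatabase;

impl DatabaseLike for StubDatabase {
    fn new(_path: impl AsRef<Path>) -> Result<Self> {
        Ok(StubDatabase)
    }

    fn total_mails(&self) -> Result<usize> {
        // A real backend would count the rows of its `emails` table here.
        Ok(0)
    }

    fn query(&self, _query: &Query) -> Result<Vec<QueryResult>> {
        // A real backend would execute `query.to_sql()` and map the rows back.
        Ok(Vec::new())
    }
}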

@ -0,0 +1,3 @@
pub mod database_like;
pub mod query;
pub mod query_result;

@ -0,0 +1,242 @@
use rsql_builder;
use serde_json;
pub use serde_json::Value;
use strum::{self, IntoEnumIterator};
use strum_macros::{EnumIter, IntoStaticStr};
use std::ops::Range;
pub const AMOUNT_FIELD_NAME: &str = "amount";
#[derive(Clone, Debug)]
pub enum Filter {
/// A database `LIKE` operation
Like(ValueField),
NotLike(ValueField),
/// An extended like that implies:
/// - wildcards on both sides (like '%test%')
/// - case-insensitive comparison
/// - trying to handle values as strings
Contains(ValueField),
Is(ValueField),
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, IntoStaticStr, EnumIter)]
#[strum(serialize_all = "snake_case")]
pub enum Field {
Path,
SenderDomain,
SenderLocalPart,
SenderName,
Year,
Month,
Day,
Timestamp,
ToGroup,
ToName,
ToAddress,
IsReply,
IsSend,
Subject,
MetaIsSeen,
MetaTags,
}
const INVALID_FIELDS: &[Field] = &[
Field::Path,
Field::Subject,
Field::Timestamp,
Field::IsReply,
Field::IsSend,
Field::MetaIsSeen,
Field::MetaTags,
];
impl Field {
pub fn all_cases() -> impl Iterator<Item = Field> {
Field::iter().filter(|f| !INVALID_FIELDS.contains(f))
}
/// Just a wrapper to offer `into` without the type ambiguity
/// that sometimes arises
pub fn as_str(&self) -> &'static str {
self.into()
}
/// A human readable name
pub fn name(&self) -> &str {
use Field::*;
match self {
SenderDomain => "Domain",
SenderLocalPart => "Address",
SenderName => "Name",
ToGroup => "Group",
ToName => "To name",
ToAddress => "To address",
Year => "Year",
Month => "Month",
Day => "Day",
Subject => "Subject",
_ => self.as_str(),
}
}
}
impl std::fmt::Display for Field {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "{}", self.name())
}
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct ValueField {
field: Field,
value: Value,
}
impl ValueField {
pub fn string<S: AsRef<str>>(field: &Field, value: S) -> ValueField {
ValueField {
field: *field,
value: Value::String(value.as_ref().to_string()),
}
}
pub fn bool(field: &Field, value: bool) -> ValueField {
ValueField {
field: *field,
value: Value::Bool(value),
}
}
pub fn usize(field: &Field, value: usize) -> ValueField {
ValueField {
field: *field,
value: Value::Number(value.into()),
}
}
pub fn array(field: &Field, value: Vec<Value>) -> ValueField {
ValueField {
field: *field,
value: Value::Array(value),
}
}
pub fn field(&self) -> &Field {
&self.field
}
pub fn value(&self) -> &Value {
&self.value
}
#[allow(clippy::inherent_to_string)]
pub fn to_string(&self) -> String {
match &self.value {
Value::String(s) => s.clone(),
_ => format!("{}", &self.value),
}
}
}
#[derive(Debug, Clone)]
pub enum OtherQuery {
/// Get all contents of a specific field
All(Field),
}
#[derive(Clone, Debug)]
pub enum Query {
Grouped {
filters: Vec<Filter>,
group_by: Field,
},
Normal {
fields: Vec<Field>,
filters: Vec<Filter>,
range: Range<usize>,
},
Other {
query: OtherQuery,
},
}
impl Query {
fn filters(&self) -> &[Filter] {
match self {
Query::Grouped { ref filters, .. } => filters,
Query::Normal { ref filters, .. } => filters,
Query::Other { .. } => &[],
}
}
}
impl Query {
pub fn to_sql(&self) -> (String, Vec<serde_json::Value>) {
let mut conditions = {
let mut whr = rsql_builder::B::new_where();
for filter in self.filters() {
match filter {
Filter::Like(f) => whr.like(f.field.into(), f.value()),
Filter::NotLike(f) => whr.not_like(f.field.into(), f.value()),
Filter::Contains(f) => whr.like(
f.field.into(),
&format!("%{}%", f.to_string().to_lowercase()),
),
Filter::Is(f) => whr.eq(f.field.into(), f.value()),
};
}
whr
};
let (header, group_by) = match self {
Query::Grouped { group_by, .. } => (
format!(
"SELECT count(path) as {}, {} FROM emails",
AMOUNT_FIELD_NAME,
group_by.as_str()
),
format!("GROUP BY {}", group_by.as_str()),
),
Query::Normal { fields, range, .. } => {
let fields: Vec<&str> = fields.iter().map(|e| e.into()).collect();
(
format!("SELECT {} FROM emails", fields.join(", ")),
format!("LIMIT {}, {}", range.start, range.end - range.start),
)
}
Query::Other {
query: OtherQuery::All(field),
} => (
format!("SELECT {} FROM emails", field.as_str()),
format!(""),
),
};
let (sql, values) = rsql_builder::B::prepare(
rsql_builder::B::new_sql(&header)
.push_build(&mut conditions)
.push_sql(&group_by),
);
(sql, values)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_test() {
let query = Query::Grouped {
filters: vec![
Filter::Like(ValueField::string(&Field::SenderDomain, "gmail.com")),
Filter::Is(ValueField::usize(&Field::Year, 2021)),
],
group_by: Field::Month,
};
dbg!(&query.to_sql());
}
}
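As a companion to the grouped test above, here is a hedged sketch of a `Normal` query with a range, written as another test that could sit in the same `tests` module. The exact SQL text and bound values depend on `rsql_builder`, so only the stable part is asserted.

#[test]
fn test_normal_query() {
    // A `Normal` query selecting two fields for the first 20 matching mails.
    let query = Query::Normal {
        fields: vec![Field::SenderDomain, Field::Subject],
        filters: vec![Filter::Contains(ValueField::string(&Field::Subject, "invoice"))],
        range: 0..20,
    };
    // Produces roughly `SELECT sender_domain, subject FROM emails ... LIMIT 0, 20`.
    let (sql, _values) = query.to_sql();
    assert!(sql.contains("FROM emails"));
}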

@ -0,0 +1,17 @@
use super::query::{Field, ValueField};
use std::collections::HashMap;
pub type QueryRow = HashMap<Field, ValueField>;
#[derive(Debug)]
pub enum QueryResult {
Grouped {
/// How many items did we find?
count: usize,
/// All the items that we grouped by, including their values,
/// so that we can use each of them to limit the next query.
value: ValueField,
},
Normal(QueryRow),
Other(ValueField),
}
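A small hedged sketch of how a caller might consume these variants; `describe` is a hypothetical helper assumed to live inside the crate.

use crate::database::query_result::QueryResult;

/// Reduce a grouped result to a printable label and its count (illustrative only).
fn describe(result: &QueryResult) -> Option<(String, usize)> {
    match result {
        QueryResult::Grouped { count, value } => Some((value.to_string(), *count)),
        // `Normal` and `Other` results carry no count.
        _ => None,
    }
}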

@ -0,0 +1,40 @@
use crossbeam_channel;
use eyre::{Report, Result};
use std::thread::JoinHandle;
pub trait Importerlike {
fn import(self) -> Result<(MessageReceiver, JoinHandle<Result<()>>)>;
}
/// The message that informs about the importer's progress
#[derive(Debug)]
pub enum Message {
/// How much progress are we making on reading the contents
/// of the emails.
/// The `usize` parameter marks the total number of items to read, if it is known.
/// The values here can vary wildly based on the type of Importer `Format` in use.
/// A Gmail backup will list the folders and how many of them
/// are already read. An mbox format will list other things, as there
/// are no folders.
ReadTotal(usize),
/// Whenever an item out of the total is read, this message will be emitted
ReadOne,
/// Similar to [`ReadTotal`]
WriteTotal(usize),
/// Similar to `ReadOne`
WriteOne,
/// Once everything has been written, we need to wait for the database
/// to sync
FinishingUp,
/// Finally, this indicates that we're done.
Done,
/// An error happened during processing
Error(eyre::Report),
/// A special case for macOS, where a permission error means we have to grant this app
/// the right to see the mail folder
#[cfg(target_os = "macos")]
MissingPermissions,
}
pub type MessageSender = crossbeam_channel::Sender<Message>;
pub type MessageReceiver = crossbeam_channel::Receiver<Message>;
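A sketch of how a caller might drive an `Importerlike` and render its progress. The concrete importer, the `run_importer` helper, and the module path are assumptions; only the message protocol above is taken from this commit.

use eyre::Result;

use crate::importer::{Importerlike, Message};

/// Run any importer and report its progress (illustrative only).
fn run_importer(importer: impl Importerlike) -> Result<()> {
    let (receiver, handle) = importer.import()?;
    // The channel closes once the importer thread is done.
    while let Ok(message) = receiver.recv() {
        match message {
            Message::ReadTotal(total) => println!("reading {} items", total),
            Message::ReadOne | Message::WriteOne => { /* tick a progress bar here */ }
            Message::WriteTotal(total) => println!("writing {} items", total),
            Message::FinishingUp => println!("waiting for the database to sync"),
            Message::Done => break,
            Message::Error(report) => return Err(report),
            #[cfg(target_os = "macos")]
            Message::MissingPermissions => println!("please grant access to the mail folder"),
        }
    }
    // Propagate any error the importer thread returned.
    handle.join().expect("importer thread panicked")
}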

@ -0,0 +1,4 @@
mod database;
mod importer;
mod model;
mod types;

@ -0,0 +1,247 @@
//! The `Engine` is the entry point to the data that should be
//! displayed in Segmentations.
//! See [`Engine`] for more information.
//! See also:
//! - [`segmentations`]
//! - [`items`]
use eyre::{bail, Result};
use lru::LruCache;
use crate::database::query::{Field, Filter, OtherQuery, Query, ValueField};
use crate::model::link::Response;
use crate::types::Config;
use super::link::Link;
use super::segmentations;
use super::types::{LoadingState, Segment, Segmentation};
use crate::database::database_like::DatabaseLike;
/// This signifies the action we're currently evaluating.
/// It is used for sending requests and receiving responses.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub(super) enum Action {
/// Recalculate the current `Segmentation` based on a changed aggregation
RecalculateSegmentation,
/// Push a new `Segmentation`
PushSegmentation,
/// Load the mails for the current `Segmentation`
LoadItems,
/// Load all tags
AllTags,
}
/// Interact with the `Database`, operate on `Segmentations`, `Segments`, and `Items`.
/// `Engine` is used as the input for almost all operations in the
/// `items::` and `segmentations::` modules.
pub struct Engine {
pub(super) search_stack: Vec<ValueField>,
pub(super) group_by_stack: Vec<Field>,
pub(super) link: Link<Action>,
pub(super) segmentations: Vec<Segmentation>,
/// Additional filters. See [`segmentations::set_filters`]
pub(super) filters: Vec<Filter>,
/// This is a very simple cache from ranges to rows.
/// It doesn't account for overlapping ranges.
/// There's a lot of room for improvement here.
pub(super) item_cache: LruCache<usize, LoadingState>,
pub(super) known_tags: Vec<String>,
}
impl Engine {
pub fn new<Database: DatabaseLike + 'static>(
config: &Config,
database: Database,
) -> Result<Self> {
let link = super::link::run(config, database)?;
let engine = Engine {
link,
search_stack: Vec::new(),
group_by_stack: vec![default_group_by_stack(0).unwrap()],
segmentations: Vec::new(),
filters: Vec::new(),
item_cache: LruCache::new(10000),
known_tags: Vec::new(),
};
Ok(engine)
}
/// Start the `Engine`. This will create a thread to
/// asynchronously communicate with the underlying backend
/// in a non-blocking manner.
pub fn start(&mut self) -> Result<()> {
// The initial segmentation
self.link
.request(&segmentations::make_query(self)?, Action::PushSegmentation)?;
// Get all tags
self.link.request(
&Query::Other {
query: OtherQuery::All(Field::MetaTags),
},
Action::AllTags,
)
}
/// Information on the underlying `Format`. Does it have tags?
pub fn format_has_tags(&self) -> bool {
!self.known_tags.is_empty()
}
/// Information on the underlying `Format`. Does it have `seen` information?
pub fn format_has_seen(&self) -> bool {
// FIXME: The current implementation just assumes that the existence of meta tags also implies is_seen
!self.known_tags.is_empty()
}
/// All the known tags in the current emails
pub fn known_tags(&self) -> &[String] {
&self.known_tags
}
/// Return the current stack of `Segmentations`
pub fn segmentations(&self) -> &[Segmentation] {
&self.segmentations
}
/// Push a new `Segment` to select a more specific `Segmentation`.
///
/// Pushing will create an additional `Aggregation` based on the selected
/// `Segment`, retrieve the data from the backend, and add it to the
/// current stack of `Segmentations`.
/// It allows **drilling down** into the data.
pub fn push(&mut self, segment: Segment) -> Result<()> {
// Assign the segmentation
let current = match self.segmentations.last_mut() {
Some(n) => n,
None => return Ok(()),
};
current.selected = Some(segment);
// Create the new search stack
self.search_stack = self
.segmentations
.iter()
.filter_map(|e| e.selected.as_ref())
.map(|p| p.field.clone())
.collect();
// Add the next group by
let index = self.group_by_stack.len();
let next = default_group_by_stack(index)
.ok_or_else(|| eyre::eyre!("default group by stack out of bounds"))?;
self.group_by_stack.push(next);
// Block UI & Wait for updates
self.link
.request(&segmentations::make_query(self)?, Action::PushSegmentation)
}
/// Pop the current `Segmentation` from the stack.
/// The opposite of [`Engine::push`]
pub fn pop(&mut self) {
if self.group_by_stack.is_empty()
|| self.segmentations.is_empty()
|| self.search_stack.is_empty()
{
tracing::error!(
"Invalid state. Not everything has the same length: {:?}, {:?}, {:?}",
&self.group_by_stack,
self.segmentations,
self.search_stack
);
return;
}
// Remove the last entry of everything
self.group_by_stack.remove(self.group_by_stack.len() - 1);
self.segmentations.remove(self.segmentations.len() - 1);
self.search_stack.remove(self.search_stack.len() - 1);
// Remove the selection in the last segmentation
if let Some(e) = self.segmentations.last_mut() {
e.selected = None
}
// Remove any rows that were cached for this segmentation
self.item_cache.clear();
}
/// Call this continuously to retrieve calculation results and apply them.
/// Any mutating function on [`Engine`], such as [`Engine::push`] or [`items::items`],
/// requires calling this method to apply the results once they're
/// available from the asynchronous backend.
/// This method is specifically non-blocking for usage in
/// `Eventloop`-based UI frameworks such as `egui`.
pub fn process(&mut self) -> Result<()> {
let response = match self.link.receive()? {
Some(n) => n,
None => return Ok(()),
};
match response {
Response::Grouped(_, Action::PushSegmentation, p) => {
self.segmentations.push(p);
// Remove any rows that were cached for this segmentation
self.item_cache.clear();
}
Response::Grouped(_, Action::RecalculateSegmentation, p) => {
let len = self.segmentations.len();
self.segmentations[len - 1] = p;
// Remove any rows that were cached for this segmentation
self.item_cache.clear();
}
Response::Normal(Query::Normal { range, .. }, Action::LoadItems, r) => {
for (index, row) in range.zip(r) {
let entry = LoadingState::Loaded(row.clone());
self.item_cache.put(index, entry);
}
}
Response::Other(Query::Other { .. }, Action::AllTags, r) => {
self.known_tags = r;
}
_ => bail!("Invalid Query / Response combination"),
}
Ok(())
}
/// Returns true if there are currently open calculations and `process`
/// needs to be called. This can be used in `Eventloop`-based frameworks
/// such as `egui` to know when to continue calling `process` in the `loop`.
/// ```ignore
/// loop {
/// self.engine.process().unwrap();
/// if self.engine.is_busy() {
/// // Call the library function to run the event-loop again.
/// ctx.request_repaint();
/// }
/// }
/// ```
pub fn is_busy(&self) -> bool {
self.link.is_processing() || self.segmentations.is_empty()
}
/// Block, waiting until the current operation is done.
/// This is useful on a command line or in unit tests.
#[allow(unused)]
pub fn wait(&mut self) -> Result<()> {
loop {
self.process()?;
if !self.link.is_processing() {
break;
}
}
Ok(())
}
}
/// Return the default aggregation fields for each segmentation stack level
pub fn default_group_by_stack(index: usize) -> Option<Field> {
match index {
0 => Some(Field::Year),
1 => Some(Field::SenderDomain),
2 => Some(Field::SenderLocalPart),
3 => Some(Field::Month),
4 => Some(Field::Day),
_ => None,
}
}
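Put together, the intended call sequence looks roughly like the following sketch. It is generic over any `DatabaseLike`, so it compiles without a concrete backend; `first_segmentation` is a hypothetical helper assumed to live inside the crate.

use eyre::Result;

use crate::database::database_like::DatabaseLike;
use crate::model::{segmentations, Engine};
use crate::types::Config;

/// Drive a fresh engine to its first segmentation (illustrative only).
fn first_segmentation<Database: DatabaseLike + 'static>(
    config: &Config,
    database: Database,
) -> Result<()> {
    let mut engine = Engine::new(config, database)?;
    // Queue the initial segmentation query and the tag query on the background thread.
    engine.start()?;
    // Block until the asynchronous backend has answered (fine for a CLI or tests).
    engine.wait()?;
    if let Some((range, total)) = segmentations::segments_range(&engine) {
        println!("showing {:?} of {} segments", range, total);
    }
    Ok(())
}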

@ -0,0 +1,93 @@
//! Operations related to retrieving `items` from the current `Segmentation`
//!
//! A `Segmentation` is an aggregation of items into many `Segments`.
//! These operations allow retrieving the individual items for all
//! segments in the `Segmentation`.
use eyre::Result;
use super::types::LoadingState;
use super::{engine::Action, Engine};
use crate::database::{
query::{Field, Filter, Query},
query_result::QueryRow,
};
use std::ops::Range;
/// Return the `items` in the current `Segmentation`
///
/// If the items don't exist in the cache, they will be queried
/// asynchronously from the database. The return value distinguishes
/// between `Loaded` and `Loading` items.
///
/// # Arguments
///
/// * `engine` - The engine to use for retrieving data
/// * `range` - The range of items to retrieve. If `None` then all items will be retrieved
pub fn items(engine: &mut Engine, range: Option<Range<usize>>) -> Result<Vec<Option<QueryRow>>> {
// build an array with either empty values or values from our cache.
let mut rows = Vec::new();
// The given range or all items
let range = range.unwrap_or_else(|| Range {
start: 0,
end: count(engine),
});
let mut missing_data = false;
for index in range.clone() {
let entry = engine.item_cache.get(&index);
let entry = match entry {
Some(LoadingState::Loaded(n)) => Some((*n).clone()),
Some(LoadingState::Loading) => None,
None => {
// for simplicity, we keep the "something is missing" state separate
missing_data = true;
// Mark the row as being loaded
engine.item_cache.put(index, LoadingState::Loading);
None
}
};
rows.push(entry);
}
// Only if at least some data is missing do we perform the request
if missing_data && !range.is_empty() {
let request = make_query(engine, range);
engine.link.request(&request, Action::LoadItems)?;
}
Ok(rows)
}
/// The total amount of elements in the current `Segmentation`
///
/// # Arguments
///
/// * `engine` - The engine to use for retrieving data
pub fn count(engine: &Engine) -> usize {
let segmentation = match engine.segmentations.last() {
Some(n) => n,
None => return 0,
};
segmentation.element_count()
}
/// Make the query for retrieving items
fn make_query(engine: &Engine, range: Range<usize>) -> Query {
let mut filters = Vec::new();
for entry in &engine.search_stack {
filters.push(Filter::Like(entry.clone()));
}
Query::Normal {
filters,
fields: vec![
Field::SenderDomain,
Field::SenderLocalPart,
Field::Subject,
Field::Path,
Field::Timestamp,
],
range,
}
}
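Because `items` only schedules what is missing from the cache, a blocking caller (a CLI or a test) would poll until every row is `Some`. A hedged sketch; `page` is a hypothetical helper assumed to live inside the crate.

use eyre::Result;

use crate::database::query_result::QueryRow;
use crate::model::{items, Engine};

/// Fetch one page of rows, blocking until the cache is filled (illustrative only).
fn page(engine: &mut Engine, start: usize, size: usize) -> Result<Vec<QueryRow>> {
    loop {
        let rows = items::items(engine, Some(start..start + size))?;
        if rows.iter().all(|row| row.is_some()) {
            return Ok(rows.into_iter().flatten().collect());
        }
        // Some rows are still loading: apply pending backend responses, then retry.
        engine.wait()?;
    }
}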

@ -0,0 +1,166 @@
//! Abstraction to perform asynchronous calculations & queries without blocking UI
//!
//! This opens a `crossbeam` `channel` to communicate with a backend.
//! Each backend operation is sent and retrieved in a loop on a thread.
//! This allows sending operations into `Link` and retrieving the contents
//! asynchronously without blocking the UI.
use std::sync::{
atomic::{AtomicUsize, Ordering},
Arc,
};
use std::{collections::HashSet, convert::TryInto};
use crossbeam_channel::{unbounded, Receiver, Sender};
use eyre::Result;
use serde_json::Value;
use crate::database::{
database_like::DatabaseLike,
query::Query,
query_result::{QueryResult, QueryRow},
};
use crate::types::Config;
use super::types::Segmentation;
#[derive(Debug)]
pub enum Response<Context: Send + 'static> {
Grouped(Query, Context, Segmentation),
Normal(Query, Context, Vec<QueryRow>),
/// FIXME: OtherQuery results are currently limited to strings as that's enough right now.
Other(Query, Context, Vec<String>),
}
pub(super) type InputSender<Context> = Sender<(Query, Context)>;
pub(super) type OutputReciever<Context> = Receiver<Result<Response<Context>>>;
pub(super) struct Link<Context: Send + 'static> {
pub input_sender: InputSender<Context>,
pub output_receiver: OutputReciever<Context>,
// We need to account for the brief moment where the processing channel is empty
// but we're applying the results. If there is a UI update in this window,
// the UI will not update again after the changes were applied because an empty
// channel indicates completed processing.
// There's also a delay between a request taken out of the input channel and being
// put into the output channel. In order to account for all of this, we employ a
// request counter to know how many requests are currently in the pipeline
request_counter: Arc<AtomicUsize>,
}
impl<Context: Send + Sync + 'static> Link<Context> {
pub fn request(&mut self, query: &Query, context: Context) -> Result<()> {
self.request_counter.fetch_add(1, Ordering::Relaxed);
self.input_sender.send((query.clone(), context))?;
Ok(())
}
pub fn receive(&mut self) -> Result<Option<Response<Context>>> {
match self.output_receiver.try_recv() {
// We received something
Ok(Ok(response)) => {
// Only subtract if we successfully received a value
self.request_counter.fetch_sub(1, Ordering::Relaxed);
Ok(Some(response))
}
// We received nothing
Err(_) => Ok(None),
// There was an error, we forward it
Ok(Err(e)) => Err(e),
}
}
pub fn is_processing(&self) -> bool {
self.request_counter.load(Ordering::Relaxed) > 0
}
/// This can be used to track the `link` from a different thread.
#[allow(unused)]
pub fn request_counter(&self) -> Arc<AtomicUsize> {
self.request_counter.clone()
}
}
pub(super) fn run<Context: Send + Sync + 'static, Database: DatabaseLike + 'static>(
config: &Config,
database: Database,
) -> Result<Link<Context>> {
// Create a new database connection, just for reading
//let database = Database::new(&config.database_path)?;
let (input_sender, input_receiver) = unbounded();
let (output_sender, output_receiver) = unbounded();
let _ = std::thread::spawn(move || inner_loop(database, input_receiver, output_sender));
Ok(Link {
input_sender,
output_receiver,
request_counter: Arc::new(AtomicUsize::new(0)),
})
}
fn inner_loop<Context: Send + Sync + 'static, Database: DatabaseLike>(
database: Database,
input_receiver: Receiver<(Query, Context)>,
output_sender: Sender<Result<Response<Context>>>,
) -> Result<()> {
loop {
let (query, context) = input_receiver.recv()?;
let result = database.query(&query)?;
let response = match query {
Query::Grouped { .. } => {
let segmentations = calculate_segmentations(&result)?;
Response::Grouped(query, context, segmentations)
}
Query::Normal { .. } => {
let converted = calculate_rows(&result)?;
Response::Normal(query, context, converted)
}
Query::Other { .. } => {
let mut results = HashSet::new();
for entry in result {
match entry {
QueryResult::Other(field) => match field.value() {
Value::Array(s) => {
for n in s {
if let Value::String(s) = n {
if !results.contains(s) {
results.insert(s.to_owned());
}
}
}
}
_ => panic!("Should not end up here"),
},
_ => panic!("Should not end up here"),
}
}
Response::Other(query, context, results.into_iter().collect())
}
};
output_sender.send(Ok(response))?;
}
}
fn calculate_segmentations(result: &[QueryResult]) -> Result<Segmentation> {
let mut segmentations = Vec::new();
for r in result.iter() {
let segmentation = r.try_into()?;
segmentations.push(segmentation);
}
Ok(Segmentation::new(segmentations))
}
fn calculate_rows(result: &[QueryResult]) -> Result<Vec<QueryRow>> {
Ok(result
.iter()
.map(|r| {
let values = match r {
QueryResult::Normal(values) => values,
_ => {
panic!("Invalid result type, expected `Normal`")
}
};
values.clone()
})
.collect())
}
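The request counter mentioned above can be observed from another thread, for example to drive a spinner. A hedged sketch, assumed to sit in a sibling module of `link` (the same way `engine.rs` imports `super::link::Link`); `watch` is hypothetical.

use std::sync::atomic::Ordering;
use std::time::Duration;

use super::link::Link;

/// Poll the request counter from another thread, e.g. to drive a spinner (illustrative only).
fn watch<Context: Send + Sync + 'static>(link: &Link<Context>) {
    let counter = link.request_counter();
    std::thread::spawn(move || {
        while counter.load(Ordering::Relaxed) > 0 {
            // Requests are still in flight; a UI could animate here.
            std::thread::sleep(Duration::from_millis(50));
        }
    });
}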

@ -0,0 +1,8 @@
mod engine;
pub mod items;
mod link;
pub mod segmentations;
mod types;
pub use engine::Engine;
pub use types::Segment;

@ -0,0 +1,206 @@
//! Operations on `Segmentations`
//!
//! `Segmentations` are collections of `Segments` based on an aggregation of `Items`.
//!
//! A `Segmentation` can be changed to be aggregated on a different `Field`.
//! - [`aggregation_fields`]
//! - [`aggregated_by`]
//! - [`set_aggregation`]
//! A `Segmentation` can be changed to only return a `Range` of segments.
//! - [`segments_range`]
//! - [`set_segments_range`]
//! A `Segmentation` has multiple `Segments`, each of which can be laid out
//! to fit into a rectangle.
//! - [`layouted_segments`]
use eyre::{eyre, Result};
use super::engine::Action;
use super::{
types::{self, Aggregation, Segment},
Engine,
};
use crate::database::query::{Field, Filter, Query};
use std::ops::RangeInclusive;
/// Return the `Range` of segments of the current `Segmentation`
///
/// Returns the `Range` and the total number of segments.
/// If no custom range has been set with [`set_segments_range`], returns
/// the full range of items, otherwise the custom range.
///
/// Returns `None` if no current `Segmentation` exists.
///
/// # Arguments
///
/// * `engine` - The engine to use for retrieving data
pub fn segments_range(engine: &Engine) -> Option<(RangeInclusive<usize>, usize)> {
let segmentation = engine.segmentations.last()?;
let len = segmentation.len();
Some(match &segmentation.range {
Some(n) => (0..=len, *n.end()),
None => (0..=len, len),
})
}
/// Set the `Range` of segments of the current `Segmentation`
///
/// # Arguments
///
/// * `engine` - The engine to use for setting data
/// * `range` - The range to apply. `None` to reset it to all `Segments`
pub fn set_segments_range(engine: &mut Engine, range: Option<RangeInclusive<usize>>) {
if let Some(n) = engine.segmentations.last_mut() {
// Make sure the range does not go beyond the current segment count
if let Some(r) = range {
let len = n.len();
if len > *r.start() && *r.end() < len {
n.range = Some(r);
}
} else {
n.range = None;
}
}
}
/// Additional filters to use in the query
///
/// These filters will be evaluated in addition to the `segmentation` conditions
/// in the query.
/// Setting this value will recalculate the current segmentations.
pub fn set_filters(engine: &mut Engine, filters: &[Filter]) -> Result<()> {
engine.filters = filters.to_vec();
// Remove any rows that were cached for this Segmentation
engine.item_cache.clear();
engine
.link
.request(&make_query(engine)?, Action::RecalculateSegmentation)
}
/// The fields available for the given aggregation
///
/// As the user `pushes` Segmentations and dives into the data,
/// fewer fields become available to aggregate by. It makes little sense
/// to aggregate, say, by year, then by month, and then again by year.
/// This method returns the possible fields still available for aggregation.
///
/// # Arguments
///
/// * `engine` - The engine to use for retrieving data
/// * `aggregation` - The aggregation to return the fields for. Required to also return the current aggregation field.
pub fn aggregation_fields(engine: &Engine, aggregation: &Aggregation) -> Vec<Field> {
#[allow(clippy::unnecessary_filter_map)]
Field::all_cases()
.filter_map(|f| {
if f == aggregation.field {
return Some(f);
}
if engine.group_by_stack.contains(&f) {
None
} else {
Some(f)
}
})
.collect()
}
/// Return all `Aggregation`s applied for the current `Segmentation`
///
/// E.g. if we're first aggregating by Year, and then by Month, this
/// will return a `Vec` of `[Year, Month]`.
///
/// # Arguments
///
/// * `engine` - The engine to use for retrieving data
pub fn aggregated_by(engine: &Engine) -> Vec<Aggregation> {
let mut result = Vec::new();
// for everything in the current stack
let len = engine.group_by_stack.len();
for (index, field) in engine.group_by_stack.iter().enumerate() {
let value = match (
len,
engine.segmentations.get(index).map(|e| e.selected.as_ref()),
) {
(n, Some(Some(segment))) if len == n => Some(segment.field.clone()),
_ => None,
};
result.push(Aggregation {
value,
field: *field,
index,
});
}
result
}
/// Change the `Field` in the given `Aggregation` to the new one.
///
/// The `Aggregation` will identify the `Segmentation` to use, so this function
/// can be used to change the way a `Segmentation` is aggregated.
///
/// Retrieve the available aggregations with [`aggregated_by`].
///
/// # Arguments
///
/// * `engine` - The engine to use for retrieving data
/// * `aggregation` - The aggregation to change
/// * `field` - The field to aggregate the `aggregation` by.
pub fn set_aggregation(
engine: &mut Engine,
aggregation: &Aggregation,
field: &Field,
) -> Result<()> {
if let Some(e) = engine.group_by_stack.get_mut(aggregation.index) {
*e = *field;
}
// Remove any rows that were cached for this Segmentation
engine.item_cache.clear();
engine
.link
.request(&make_query(engine)?, Action::RecalculateSegmentation)
}
/// Return the `Segment`s in the current `Segmentation`. Apply layout based on `Rect`.
///
/// It will perform the calculations so that all segments fit into bounds.
/// The results will be applied to each `Segment`.
///
/// Returns the layouted segments.
///
/// # Arguments
///
/// * `engine` - The engine to use for retrieving data
/// * `bounds` - The bounds into which the segments have to fit.
pub fn layouted_segments(engine: &mut Engine, bounds: types::Rect) -> Option<&[Segment]> {
let segmentation = engine.segmentations.last_mut()?;
segmentation.update_layout(bounds);
Some(segmentation.items())
}
/// Can another level of aggregation be performed? Based on
/// [`default_group_by_stack`]
pub fn can_aggregate_more(engine: &Engine) -> bool {
let index = engine.group_by_stack.len();
super::engine::default_group_by_stack(index).is_some()
}
/// Perform the query that returns an aggregated `Segmentation`
pub(super) fn make_query(engine: &Engine) -> Result<Query> {
let mut filters = Vec::new();
for entry in &engine.search_stack {
filters.push(Filter::Like(entry.clone()));
}
for entry in &engine.filters {
filters.push(entry.clone());
}
let last = engine
.group_by_stack
.last()
.ok_or_else(|| eyre!("Invalid Segmentation state"))?;
Ok(Query::Grouped {
filters,
group_by: *last,
})
}
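A hedged sketch of the intended flow for re-aggregating the deepest level: read the applied aggregations, check which fields are still allowed, then set the new one. `regroup` is a hypothetical helper assumed to live inside the crate.

use eyre::Result;

use crate::database::query::Field;
use crate::model::{segmentations, Engine};

/// Re-aggregate the deepest segmentation by `field`, if that field is still allowed (illustrative only).
fn regroup(engine: &mut Engine, field: Field) -> Result<()> {
    let aggregations = segmentations::aggregated_by(engine);
    let last = match aggregations.last() {
        Some(aggregation) => aggregation,
        None => return Ok(()),
    };
    // Only switch if the field is still available at this level.
    if segmentations::aggregation_fields(engine, last).contains(&field) {
        segmentations::set_aggregation(engine, last, &field)?;
        // Apply the recalculated segmentation once the backend answers.
        engine.wait()?;
    }
    Ok(())
}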

@ -0,0 +1,27 @@
use crate::database::query::{Field, ValueField};
/// An aggregation field.
/// Contains the `Field` to aggregate by, the `Value` used for aggregation,
/// as well as the index in the stack of `Segmentations` that this relates to.
pub struct Aggregation {
pub(in super::super) value: Option<ValueField>,
pub(in super::super) field: Field,
pub(in super::super) index: usize,
}
impl Aggregation {
/// Return the value in this aggregation as a string
pub fn value(&self) -> Option<String> {
self.value.as_ref().map(|e| e.value().to_string())
}
/// The name of the field as a `String`
pub fn name(&self) -> &str {
self.field.name()
}
/// The index of the field within the given fields
pub fn index(&self, in_fields: &[Field]) -> Option<usize> {
in_fields.iter().position(|p| p == &self.field)
}
}

@ -0,0 +1,8 @@
use crate::database::query_result::QueryRow;
/// Is an individual row/item being loaded or already loaded?
/// Used in a cache to improve the loading of data for the UI.
pub enum LoadingState {
Loaded(QueryRow),
Loading,
}

@ -0,0 +1,11 @@
mod aggregation;
mod loading_state;
mod rect;
mod segment;
mod segmentation;
pub use aggregation::Aggregation;
pub use loading_state::LoadingState;
pub use rect::Rect;
pub use segment::*;
pub use segmentation::*;

@ -0,0 +1,18 @@
/// Roughly mirrors `egui::Rect`, for simplicity
pub struct Rect {
pub left: f64,
pub top: f64,
pub width: f64,
pub height: f64,
}
impl Rect {
pub fn new(min: (f64, f64), max: (f64, f64)) -> Rect {
Rect {
left: min.0,
top: min.1,
width: max.0 - min.0,
height: max.1 - min.1,
}
}
}

@ -0,0 +1,57 @@
use super::Rect;
use std::convert::TryFrom;
use eyre::{Report, Result};
use treemap::{self, Mappable};
use crate::database::{query::ValueField, query_result::QueryResult};
#[derive(Debug, Clone)]
pub struct Segment {
pub field: ValueField,
pub count: usize,
/// A TreeMap Rect
pub rect: treemap::Rect,
}
impl Segment {
/// Perform rect conversion from TreeMap to the public type
pub fn layout_rect(&self) -> Rect {
Rect::new(
(self.rect.x, self.rect.y),
(self.rect.x + self.rect.w, self.rect.y + self.rect.h),
)
}
}
impl Mappable for Segment {
fn size(&self) -> f64 {
self.count as f64
}
fn bounds(&self) -> &treemap::Rect {
&self.rect
}
fn set_bounds(&mut self, bounds: treemap::Rect) {
self.rect = bounds;
}
}
impl<'a> TryFrom<&'a QueryResult> for Segment {
type Error = Report;
fn try_from(result: &'a QueryResult) -> Result<Self> {
let (count, field) = match result {
QueryResult::Grouped { count, value } => (count, value),
_ => return Err(eyre::eyre!("Invalid result type, expected `Grouped`")),
};
// So far we can only support one group-by at a time,
// at least in here. The queries themselves do support it.
Ok(Segment {
field: field.clone(),
count: *count,
rect: treemap::Rect::new(),
})
}
}

@ -0,0 +1,53 @@
use treemap::{self, TreemapLayout};
use super::segment::Segment;
use super::Rect;
/// A small NewType so that we can keep all the `TreeMap` code in here and don't
/// have to do the layout calculation in a widget.
#[derive(Debug)]
pub struct Segmentation {
items: Vec<Segment>,
pub selected: Option<Segment>,
pub range: Option<std::ops::RangeInclusive<usize>>,
}
impl Segmentation {
pub fn new(items: Vec<Segment>) -> Self {
Self {
items,
selected: None,
range: None,
}
}
pub fn len(&self) -> usize {
self.items.len()
}
/// Update the layout information in the Segments
/// based on the current size
pub fn update_layout(&mut self, rect: Rect) {
let layout = TreemapLayout::new();
let bounds = treemap::Rect::from_points(rect.left, rect.top, rect.width, rect.height);
layout.layout_items(self.items(), bounds);
}
/// The total number of items in all the `Segments`,
/// i.e. the sum of the counts of the `Segments`
pub fn element_count(&self) -> usize {
self.items.iter().map(|e| e.count).sum::<usize>()
}
/// The items in this `Segmentation`, with range applied
pub fn items(&mut self) -> &mut [Segment] {
match &self.range {
Some(n) => {
// we reverse the range
let reversed_range = (self.len() - n.end())..=(self.len() - 1);
&mut self.items[reversed_range]
}
None => self.items.as_mut_slice(),
}
}
}
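The reversed-range behaviour of `items()` is easiest to see in a test-style sketch that could sit in a `#[cfg(test)]` module next to this type; the values are made up.

#[cfg(test)]
mod tests {
    use super::*;
    use crate::database::query::{Field, ValueField};

    fn segment(year: &str, count: usize) -> Segment {
        Segment {
            field: ValueField::string(&Field::Year, year),
            count,
            rect: treemap::Rect::new(),
        }
    }

    #[test]
    fn range_limits_the_returned_items() {
        let mut segmentation = Segmentation::new(vec![
            segment("2019", 10),
            segment("2020", 20),
            segment("2021", 30),
        ]);
        assert_eq!(segmentation.len(), 3);
        assert_eq!(segmentation.element_count(), 60);
        // A range ending at 2 is mapped onto the *reversed* indices 1..=2,
        // so only the last two segments are returned.
        segmentation.range = Some(0..=2);
        assert_eq!(segmentation.items().len(), 2);
    }
}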

@ -0,0 +1,203 @@
use eyre::{eyre, Result};
use rand::Rng;
use serde_json::Value;
use strum::{self, IntoEnumIterator};
use strum_macros::{EnumIter, IntoStaticStr};
use std::collections::{HashMap, HashSet};
use std::iter::FromIterator;
use std::path::{Path, PathBuf};
use std::str::FromStr;
// use super::ImporterFormatType;
#[derive(Debug, Clone, Copy, PartialEq, Eq, IntoStaticStr, EnumIter)]
pub enum FormatType {
AppleMail,
GmailVault,
Mbox,
}
impl FormatType {
pub fn all_cases() -> impl Iterator<Item = FormatType> {
FormatType::iter()
}
pub fn name(&self) -> &'static str {
match self {
FormatType::AppleMail => "Apple Mail",
FormatType::GmailVault => "Gmail Vault Download",
FormatType::Mbox => "Mbox",
}
}
/// Forward the importer format location
pub fn default_path(&self) -> Option<PathBuf> {
todo!()
// use crate::importer::formats::{self, ImporterFormat};
// match self {
// FormatType::AppleMail => formats::AppleMail::default_path(),
// FormatType::GmailVault => formats::Gmail::default_path(),
// FormatType::Mbox => formats::Mbox::default_path(),
// }
}
}
impl Default for FormatType {
/// We return a different default, based on the platform we're on.
/// FIXME: We don't have support for Outlook yet, so on Windows we go with Mbox as well
fn default() -> Self {
#[cfg(target_os = "macos")]
return FormatType::AppleMail;
#[cfg(not(target_os = "macos"))]
return FormatType::Mbox;
}
}
impl From<&String> for FormatType {
fn from(format: &String) -> Self {
FormatType::from(format.as_str())
}
}
impl From<&str> for FormatType {
fn from(format: &str) -> Self {
match format {
"apple" => FormatType::AppleMail,
"gmailvault" => FormatType::GmailVault,
"mbox" => FormatType::Mbox,
_ => panic!("Unknown format: {}", &format),
}
}
}
impl From<FormatType> for String {
fn from(format: FormatType) -> Self {
match format {
FormatType::AppleMail => "apple".to_owned(),
FormatType::GmailVault => "gmailvault".to_owned(),
FormatType::Mbox => "mbox".to_owned(),
}
}
}
#[derive(Debug, Clone)]
pub struct Config {
/// The path to where the database should be stored
pub database_path: PathBuf,
/// The path where the emails are
pub emails_folder_path: PathBuf,
/// The addresses used to send emails
pub sender_emails: HashSet<String>,
/// The importer format we're using
pub format: FormatType,
/// Did the user intend to keep the database,
/// or is the database path just temporary?
pub persistent: bool,
}
impl Config {
/// Construct a config from a hashmap of field values.
/// For missing fields, take a reasonable default value,
/// in order to be somewhat backwards compatible.
pub fn from_fields<P: AsRef<Path>>(path: P, fields: HashMap<String, Value>) -> Result<Config> {
// The following fields are from version 1.0, so they should always exist
let emails_folder_path_str = fields
.get("emails_folder_path")
.ok_or_else(|| eyre!("Missing config field emails_folder_path"))?
.as_str()
.ok_or_else(|| eyre!("Invalid field type for emails_folder_path"))?;
let emails_folder_path = PathBuf::from_str(emails_folder_path_str).map_err(|e| {
eyre!(
"Invalid emails_folder_path: {}: {}",
&emails_folder_path_str,
e
)
})?;
#[allow(clippy::needless_collect)]
let sender_emails: Vec<String> = fields
.get("sender_emails")
.map(|v| v.as_str().map(|e| e.to_string()))
.flatten()
.ok_or_else(|| eyre!("Missing config field sender_emails"))?
.split(',')
.map(|e| e.trim().to_owned())
.collect();
let format = fields
.get("format")
.map(|e| e.as_str())
.flatten()
.map(FormatType::from)
.ok_or_else(|| eyre!("Missing config field format_type"))?;
let persistent = fields
.get("persistent")
.map(|e| e.as_bool())
.flatten()
.ok_or_else(|| eyre!("Missing config field persistent"))?;
Ok(Config {
database_path: path.as_ref().to_path_buf(),
emails_folder_path,
sender_emails: HashSet::from_iter(sender_emails.into_iter()),
format,
persistent,
})
}
pub fn new<A: AsRef<Path>>(
db: Option<A>,
mails: A,
sender_emails: Vec<String>,
format: FormatType,
) -> eyre::Result<Self> {
// If we don't have a database path, we use a temporary folder.
let persistent = db.is_some();
let database_path = match db {
Some(n) => n.as_ref().to_path_buf(),
None => {
let number: u32 = rand::thread_rng().gen();
let folder = "postsack";
let filename = format!("{}.sqlite", number);
let mut temp_dir = std::env::temp_dir();
temp_dir.push(folder);
// the folder has to be created
std::fs::create_dir_all(&temp_dir)?;
temp_dir.push(filename);
temp_dir
}
};
Ok(Config {
database_path,
emails_folder_path: mails.as_ref().to_path_buf(),
sender_emails: HashSet::from_iter(sender_emails.into_iter()),
format,
persistent,
})
}
pub fn into_fields(&self) -> Option<HashMap<String, Value>> {
let mut new = HashMap::new();
new.insert(
"database_path".to_owned(),
self.database_path.to_str()?.into(),
);
new.insert(
"emails_folder_path".to_owned(),
self.emails_folder_path.to_str()?.into(),
);
new.insert("persistent".to_owned(), self.persistent.into());
new.insert(
"sender_emails".to_owned(),
self.sender_emails
.iter()
.cloned()
.collect::<Vec<String>>()
.join(",")
.into(),
);
let format: String = self.format.into();
new.insert("format".to_owned(), format.into());
Some(new)
}
}
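A hedged sketch of building a throw-away configuration with a temporary database; the paths and address are purely illustrative, and the helper is hypothetical.

use eyre::Result;

use crate::types::{Config, FormatType};

/// Build a throw-away configuration (illustrative only; the paths and address are made up).
fn example_config() -> Result<Config> {
    // Passing `None` as the database path places the SQLite file in a temporary
    // folder and marks the configuration as non-persistent.
    let config = Config::new(
        None,
        "/tmp/example-mails",
        vec!["me@example.org".to_string()],
        FormatType::Mbox,
    )?;
    assert!(!config.persistent);
    Ok(config)
}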

@ -0,0 +1,10 @@
// use std::default::Default;
// use std::path::PathBuf;
// pub trait ImporterFormatType: Default + Clone + Copy + Eq {
// // fn all_formats() -> Vec<Self>
// // where
// // Self: Sized;
// fn name(&self) -> &'static str;
// fn default_path(&self) -> Option<PathBuf>;
// }

@ -0,0 +1,4 @@
mod config;
mod format_type;
pub use config::{Config, FormatType};
// pub use format_type::ImporterFormatType;