mirror of https://github.com/sayanarijit/xplr
Compare commits
136 Commits
Author | SHA1 | Date |
---|---|---|
Arijit Basu | e0b0466e42 | 4 weeks ago |
Arijit Basu | 805e1594ed | 4 weeks ago |
Arijit Basu | 41648ced34 | 4 weeks ago |
Arijit Basu | 89d7bccce8 | 4 weeks ago |
Arijit Basu | e15c1e8a8c | 4 weeks ago |
Arijit Basu | 8afdf9e478 | 1 month ago |
Abhinav Natarajan | a48dae008c | 1 month ago |
Arijit Basu | ad8afa9d38 | 1 month ago |
Arijit Basu | c2a11059c8 | 1 month ago |
Arijit Basu | 6d7ccce282 | 1 month ago |
Arijit Basu | 90df0a2b5a | 1 month ago |
Arijit Basu | ce52bcdf94 | 1 month ago |
Arijit Basu | 6fb0781fe4 | 1 month ago |
Arijit Basu | c1bb251fef | 2 months ago |
Arijit Basu | 976530ba70 | 2 months ago |
Arijit Basu | 96da7e1da8 | 2 months ago |
Arijit Basu | 96ffe8680b | 2 months ago |
Ahmed ElSamhaa | 1600ad9a9c | 2 months ago |
Ahmed ElSamhaa | 2a3d056bf1 | 2 months ago |
Ahmed ElSamhaa | 91276f6871 | 2 months ago |
Ahmed ElSamhaa | 00bd54abe9 | 2 months ago |
Ahmed ElSamhaa | 95621af9eb | 2 months ago |
Ahmed ElSamhaa | 5240b3904b | 2 months ago |
Ahmed ElSamhaa | a6fb695ff9 | 2 months ago |
Ahmed ElSamhaa | fd40de26e7 | 2 months ago |
Ahmed ElSamhaa | 87805509c5 | 2 months ago |
Ahmed ElSamhaa | 4aa367ca7c | 2 months ago |
Ahmed ElSamhaa | 01606e0e60 | 2 months ago |
Ahmed ElSamhaa | e834242f5d | 2 months ago |
alice | 7c6dffc2c6 | 2 months ago |
har7an | d5217f6677 | 2 months ago |
Arijit Basu | 0285f0824c | 3 months ago |
Arijit Basu | a6b19425ae | 3 months ago |
Arijit Basu | 9db8b2cc19 | 3 months ago |
mikoloism | 68500f3a8e | 3 months ago |
Arijit Basu | ded2e108bf | 4 months ago |
Arijit Basu | 6e8f3da971 | 4 months ago |
Arijit Basu | d76a70fed4 | 4 months ago |
Arijit Basu | 16673963aa | 4 months ago |
Arijit Basu | b0ef9a5190 | 4 months ago |
Arijit Basu | b70337708c | 4 months ago |
Arijit Basu | 9127d15494 | 4 months ago |
Arijit Basu | 66d9f7e586 | 4 months ago |
Arijit Basu | eab47a9044 | 4 months ago |
Arijit Basu | a9e3752f56 | 4 months ago |
Arijit Basu | 470bea1265 | 4 months ago |
Arijit Basu | cc578aaf0a | 5 months ago |
Arijit Basu | 50e81853fe | 5 months ago |
Arijit Basu | 414b45e4fd | 5 months ago |
Arijit Basu | 75dabeb283 | 5 months ago |
Arijit Basu | 1629398adf | 6 months ago |
Arijit Basu | dd8bb74dd4 | 6 months ago |
Felix Yan | 1dc5eae8fc | 6 months ago |
Arijit Basu | 484b94a961 | 7 months ago |
Arijit Basu | 50d9d1c54b | 9 months ago |
Dugan Chen | c7c3d2d7f6 | 9 months ago |
Arijit Basu | 1441275860 | 10 months ago |
Arijit Basu | 8af1647c09 | 10 months ago |
Arijit Basu | 22b5fca8d9 | 10 months ago |
Arijit Basu | 4a3f18100d | 10 months ago |
Lewis Cook | 6df168f8c1 | 10 months ago |
Arijit Basu | eeb7b5d684 | 10 months ago |
Arijit Basu | 9a7ff5846d | 11 months ago |
Arijit Basu | 1b2226512f | 11 months ago |
Arijit Basu | 56472998f5 | 11 months ago |
Arijit Basu | bf7ae3f748 | 11 months ago |
Arijit Basu | 94ba22bbcc | 11 months ago |
Arijit Basu | 567a6201a8 | 11 months ago |
Arijit Basu | 54d6d19003 | 11 months ago |
Dugan Chen | 4aeb3dd7c8 | 11 months ago |
Dugan Chen | 5626422ba4 | 11 months ago |
Arijit Basu | 1941355128 | 11 months ago |
Arijit Basu | 2f78691333 | 11 months ago |
Arijit Basu | a2fbf759dd | 11 months ago |
Arijit Basu | bc7f3cbbcf | 11 months ago |
Arijit Basu | ad50342260 | 11 months ago |
Arijit Basu | 313c61db96 | 11 months ago |
Arijit Basu | 255517c2a9 | 11 months ago |
Arijit Basu | 9844ae1476 | 11 months ago |
Arijit Basu | d282032b3d | 11 months ago |
Arijit Basu | ba26752f6c | 11 months ago |
Arijit Basu | 0cc8723e8e | 11 months ago |
Arijit Basu | 2f3c2ea0e4 | 11 months ago |
Arijit Basu | 219ee68152 | 11 months ago |
Arijit Basu | 859d888bde | 11 months ago |
Arijit Basu | f84d9d5c6a | 11 months ago |
Arijit Basu | 3fcfb1dbef | 11 months ago |
Arijit Basu | 4c51f0affe | 11 months ago |
Arijit Basu | 9d1bd99fd4 | 11 months ago |
Arijit Basu | 8209988ba6 | 11 months ago |
Arijit Basu | 33c5aa9f14 | 11 months ago |
Arijit Basu | cae50e4bcf | 11 months ago |
har7an | 048b1c701a | 12 months ago |
Arijit Basu | 508f4b980b | 12 months ago |
Karim Lalani | 28c9e0e3a0 | 12 months ago |
Noah Mayr | 4ccd9796c4 | 12 months ago |
Solitude | 36a7f1dc17 | 1 year ago |
Arijit Basu | 2cc8e0c510 | 1 year ago |
Arijit Basu | 27bc1217b3 | 1 year ago |
Arijit Basu | ab90381fda | 1 year ago |
Arijit Basu | 2a775371f6 | 1 year ago |
Arijit Basu | 3bee8060c7 | 1 year ago |
Arijit Basu | 97e30e2a6f | 1 year ago |
Arijit Basu | 7c26c48e18 | 1 year ago |
Arijit Basu | 17269ab17f | 1 year ago |
Arijit Basu | 8aff0ba918 | 1 year ago |
Arijit Basu | 4228a71ed9 | 1 year ago |
Arijit Basu | 252a1f5c37 | 1 year ago |
Henrique Goulart | 4f0db1f3e3 | 1 year ago |
Kian-Meng Ang | 8cca2d3566 | 1 year ago |
Arijit Basu | b995be0089 | 1 year ago |
Arijit Basu | c79175764b | 1 year ago |
Arijit Basu | e0d683b13a | 1 year ago |
Arijit Basu | 59279b816d | 1 year ago |
Arijit Basu | 8c4f744bb1 | 1 year ago |
Arijit Basu | d80b1b4db8 | 1 year ago |
Arijit Basu | 5f07e6143f | 1 year ago |
Arijit Basu | 43c88b4873 | 1 year ago |
Arijit Basu | d52ccac8ba | 1 year ago |
Arijit Basu | 26d79bd799 | 1 year ago |
Arijit Basu | 553b4ed3d6 | 1 year ago |
Arijit Basu | e9fc643bd9 | 1 year ago |
Arijit Basu | 3afccf2a54 | 1 year ago |
Arijit Basu | 1d9d5f5145 | 1 year ago |
Emanuel | 0715e242ef | 1 year ago |
Emanuel | 006c655e3a | 1 year ago |
Emanuel | 105e770f58 | 1 year ago |
emanuel | d6e33e68e3 | 1 year ago |
BoolPurist | 078da205ca | 1 year ago |
BoolPurist | caa365b4a0 | 1 year ago |
emanuel | 4c4e7f41b4 | 1 year ago |
Arijit Basu | e6e701b371 | 1 year ago |
Arijit Basu | 0cd5a9163d | 1 year ago |
Arijit Basu | ac958c9532 | 1 year ago |
Arijit Basu | 7fbcd18bb4 | 1 year ago |
Arijit Basu | 1369fcea9a | 1 year ago |
@ -1,4 +1,14 @@
|
||||
# Why dynamic linking?
|
||||
# See https://github.com/sayanarijit/xplr/issues/309
|
||||
|
||||
[target.x86_64-unknown-linux-gnu]
|
||||
rustflags = ["-C", "link-args=-rdynamic"]
|
||||
|
||||
[target.aarch64-unknown-linux-gnu]
|
||||
rustflags = ["-C", "linker=aarch64-linux-gnu-gcc", "-C", "link-args=-rdynamic"]
|
||||
|
||||
[target.aarch64-linux-android]
|
||||
rustflags = ["-C", "linker=aarch64-linux-android-clang", "-C", "link-args=-rdynamic", "-C", "default-linker-libraries"]
|
||||
|
||||
[target.arm-unknown-linux-gnueabihf]
|
||||
rustflags = ["-C", "linker=arm-linux-gnueabihf-gcc", "-C", "link-args=-rdynamic"]
|
||||
|
@ -0,0 +1,6 @@
|
||||
ratatui
|
||||
crate
|
||||
ser
|
||||
enque
|
||||
noice
|
||||
ans
|
File diff suppressed because it is too large
Load Diff
@ -1,12 +0,0 @@
|
||||
# Community
|
||||
|
||||
Building an active community of awesome people and learning stuff together is
|
||||
one of my reasons to publish this tool and maintain it. Hence, please feel free
|
||||
to reach out via your preferred way.
|
||||
|
||||
- Real-time chat lovers can join our [**matrix room**][3] or [**discord channel**][1].
|
||||
- Forum discussion veterans can [**start a new GitHub discussion**][2].
|
||||
|
||||
[1]: https://discord.gg/JmasSPCcz3
|
||||
[2]: https://github.com/sayanarijit/xplr/discussions
|
||||
[3]: https://matrix.to/#/#xplr-pub:matrix.org
|
@ -1,32 +0,0 @@
|
||||
If you like xplr, and want to contribute, that would be really awesome.
|
||||
|
||||
You can contribute to this project in the following ways
|
||||
|
||||
- Contribute your time and expertise (read [CONTRIBUTING.md][1] for instructions).
|
||||
|
||||
- **Developers:** You can help me improve my code, fix things, implement features etc.
|
||||
- **Repository maintainers:** You can save the users from the pain of managing xplr in their system manually.
|
||||
- **Code Reviewers:** Teach me your ways of code.
|
||||
- **Designers:** You can make the logo even more awesome, donate stickers and blog post worthy pictures.
|
||||
- **Bloggers, YouTubers & broadcasters:** You can help spread the word.
|
||||
|
||||
- Contribute by donating or sponsoring me via any of the following ways.
|
||||
- [GitHub Sponsors][5]
|
||||
- [Open Collective][2]
|
||||
- [ko-fi][3]
|
||||
- [liberapay][6]
|
||||
- [PayPal][7]
|
||||
|
||||
For further queries or concern related to `xplr`, [just ask us][4].
|
||||
|
||||
### Backers
|
||||
|
||||
<a href="https://opencollective.com/xplr#backer"><img src="https://opencollective.com/xplr/tiers/backer.svg?width=890" /></a>
|
||||
|
||||
[1]: https://github.com/sayanarijit/xplr/blob/main/CONTRIBUTING.md
|
||||
[2]: https://opencollective.com/xplr
|
||||
[3]: https://ko-fi.com/sayanarijit
|
||||
[4]: community.md
|
||||
[5]: https://github.com/sponsors/sayanarijit?o=esb
|
||||
[6]: https://liberapay.com/sayanarijit
|
||||
[7]: https://paypal.me/sayanarijit
|
@ -0,0 +1,77 @@
|
||||
# Searching
|
||||
|
||||
xplr supports searching paths using different algorithms. The search mechanism
|
||||
generally appears between filters and sorters in the `Sort & filter` panel.
|
||||
|
||||
Example:
|
||||
|
||||
```
|
||||
fzy:foo↓
|
||||
```
|
||||
|
||||
This line means that the nodes visible on the table are being filtered using the
|
||||
[fuzzy matching][1] algorithm on the input `foo`. The arrow means that ranking based
|
||||
ordering is being applied, i.e. [sorters][2] are being ignored.
|
||||
|
||||
## Node Searcher Applicable
|
||||
|
||||
Node Searcher contains the following fields:
|
||||
|
||||
- [pattern][3]
|
||||
- [recoverable_focus][4]
|
||||
- [algorithm][5]
|
||||
- [unordered][7]
|
||||
|
||||
### pattern
|
||||
|
||||
The patterns used to search.
|
||||
|
||||
Type: string
|
||||
|
||||
### recoverable_focus
|
||||
|
||||
Where to focus when search is cancelled.
|
||||
|
||||
Type: nullable string
|
||||
|
||||
### algorithm
|
||||
|
||||
Search algorithm to use. Defaults to the value set in
|
||||
[xplr.config.general.search.algorithm][8].
|
||||
|
||||
It can be one of the following:
|
||||
|
||||
- Fuzzy
|
||||
- Regex
|
||||
|
||||
### unordered
|
||||
|
||||
Whether to skip ordering the search result by algorithm based ranking. Defaults
|
||||
to the value set in [xplr.config.general.search.unordered][9].
|
||||
|
||||
Type: boolean
|
||||
|
||||
## Example:
|
||||
|
||||
```lua
|
||||
local searcher = {
|
||||
pattern = "pattern to search",
|
||||
recoverable_focus = "/path/to/focus/on/cancel",
|
||||
algorithm = "Fuzzy",
|
||||
unordered = false,
|
||||
}
|
||||
|
||||
xplr.util.explore({ searcher = searcher })
|
||||
```
|
||||
|
||||
See [xplr.util.explore][6].
|
||||
|
||||
[1]: https://en.wikipedia.org/wiki/Approximate_string_matching
|
||||
[2]: sorting.md
|
||||
[3]: #pattern
|
||||
[4]: #recoverable_focus
|
||||
[5]: #algorithm
|
||||
[6]: xplr.util.md#xplrutilexplore
|
||||
[7]: #unordered
|
||||
[8]: general-config.md#xplrconfiggeneralsearchalgorithm
|
||||
[9]: general-config.md#xplrconfiggeneralsearchunordered
|
@ -0,0 +1,96 @@
|
||||
# Sum Type
|
||||
|
||||
> This section isn't specific to xplr. However, since xplr configuration makes
|
||||
> heavy use of this particular data type, even though it isn't available in
|
||||
> most of the mainstream programming languages (yet), making it a wild or
|
||||
> unfamiliar concept for many, it's worth doing a quick introduction here.
|
||||
>
|
||||
> If you're already familiar with [Sum Type / Tagged Union][1] (e.g. Rust's
|
||||
> enum), you can skip ahead.
|
||||
|
||||
While reading this doc, you'll come across some data types like [Layout][2],
|
||||
[Color][4], [Message][3] etc. that says something like "x is a sum type that
|
||||
can be any of the following", and then you'll see a list of strings and/or lua
|
||||
tables just below.
|
||||
|
||||
Yes, they are actually sum types, i.e. they can be any of the given set of
|
||||
tagged variants listed there.
|
||||
|
||||
Notice the word "be". Unlike classes or structs (aka product types), they can't
|
||||
"have" values, they can only "be" the value, or rather, be one of the possible
|
||||
set of values.
|
||||
|
||||
Also notice the word "tagged". Unlike the single variant `null`, or the dual
|
||||
variant `boolean` types, the variants of sum types are tagged (i.e. named), and
|
||||
may further have, or be, value or values of any data type.
|
||||
|
||||
A simple example of a sum type is an enum. Many programming languages have
|
||||
them, but only a few modern programming languages allow nesting other types
|
||||
into a sum type.
|
||||
|
||||
```rust
|
||||
enum Color {
|
||||
Red,
|
||||
Green,
|
||||
}
|
||||
```
|
||||
|
||||
Here, `Color` can be one of two possible set of values: `Red` and `Green`, just
|
||||
like `boolean`, but unlike `boolean`, being tagged allows `Color` to have more
|
||||
than two variants if required, by changing the definition.
|
||||
|
||||
e.g.
|
||||
|
||||
```rust
|
||||
enum Color {
|
||||
Red,
|
||||
Green,
|
||||
Blue,
|
||||
}
|
||||
```
|
||||
|
||||
We'd document it here as:
|
||||
|
||||
> Color is a sum type that can be one of the following:
|
||||
>
|
||||
> - "Red"
|
||||
> - "Green"
|
||||
> - "Blue"
|
||||
|
||||
But some languages (like Rust, Haskell, Elm etc.) go even further, allowing us
|
||||
to associate each branch of the enum with further nested types like:
|
||||
|
||||
```rust
|
||||
enum Layout {
|
||||
Table,
|
||||
HelpMenu,
|
||||
Horizontal {
|
||||
config: LayoutConfig, // A product type (similar to class/struct)
|
||||
splits: Vec<Layout> // A list of "Layout"s (i.e. list of sum types)
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
Here, as we can see, unlike the first example, some of `Layout`'s possible
|
||||
variants can have further nested types associated with them. Note that
|
||||
`Horizontal` here can have a sum type (e.g. enum), or a product type (e.g.
|
||||
class/struct), or both (any number of them actually) nested in it. But the
|
||||
nested values will only exist when `Layout` is `Horizontal`.
|
||||
|
||||
We'd document it here as:
|
||||
|
||||
> Layout is a sum type that can be one of the following:
|
||||
>
|
||||
> - "Table"
|
||||
> - "HelpMenu"
|
||||
> - { Horizontal = { config = Layout Config, splits = { Layout, ... } } }
|
||||
|
||||
And then we'd go on documenting whatever `Layout Config` is.
|
||||
|
||||
So, there you go. This is exactly what sum types are - glorified enums that can
|
||||
have nested types in each branch.
|
||||
|
||||
[1]: https://en.wikipedia.org/wiki/Tagged_union
|
||||
[2]: layout.md
|
||||
[3]: message.md
|
||||
[4]: style.md#color
|
@ -0,0 +1,11 @@
|
||||
v="0.4.37"
|
||||
|
||||
curl -L https://github.com/rust-lang/mdBook/releases/download/v$v/mdbook-v$v-x86_64-unknown-linux-gnu.tar.gz -o mdbook.tgz \
|
||||
&& tar xzvf mdbook.tgz \
|
||||
&& ./mdbook build docs/en \
|
||||
&& mkdir dist \
|
||||
&& mv -v docs/en/book/html dist/en \
|
||||
&& mv -v assets dist \
|
||||
&& mv -v docs/landing/index.html docs/landing/css docs/landing/js dist \
|
||||
&& rm -v mdbook \
|
||||
&& rm -v mdbook.tgz
|
@ -0,0 +1,26 @@
|
||||
name: xplr
|
||||
version: git
|
||||
summary: A hackable, minimal, fast TUI file explorer
|
||||
description: |
|
||||
xplr is a terminal UI based file explorer
|
||||
that aims to increase our terminal productivity by being a flexible,
|
||||
interactive orchestrator for the ever growing awesome command-line
|
||||
utilities that work with the file-system.
|
||||
source-code: https://github.com/sayanarijit/xplr
|
||||
issues: https://github.com/sayanarijit/xplr/issues
|
||||
website: https://xplr.dev/
|
||||
|
||||
base: core20
|
||||
grade: devel # must be 'stable' to release into candidate/stable channels
|
||||
confinement: devmode # use 'strict' once you have the right plugs and slots
|
||||
|
||||
|
||||
parts:
|
||||
xplr:
|
||||
plugin: rust
|
||||
source: .
|
||||
|
||||
apps:
|
||||
xplr:
|
||||
command: bin/xplr
|
||||
|
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,224 @@
|
||||
// Things of the past, mostly bad decisions, which cannot be erased, stay in this
|
||||
// haunted module.
|
||||
|
||||
use crate::app;
|
||||
use crate::lua;
|
||||
use crate::ui::block;
|
||||
use crate::ui::string_to_text;
|
||||
use crate::ui::Constraint;
|
||||
use crate::ui::ContentRendererArg;
|
||||
use crate::ui::UI;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tui::layout::Constraint as TuiConstraint;
|
||||
use tui::layout::Rect as TuiRect;
|
||||
use tui::widgets::Cell;
|
||||
use tui::widgets::List;
|
||||
use tui::widgets::ListItem;
|
||||
use tui::widgets::Paragraph;
|
||||
use tui::widgets::Row;
|
||||
use tui::widgets::Table;
|
||||
use tui::Frame;
|
||||
|
||||
/// A cursed enum from crate::ui.
///
/// Describes what a custom panel renders: a paragraph, a list, or a table.
/// Each shape comes in two flavors: `Static*` carries the pre-rendered data
/// directly, while in `Dynamic*` the `render` string names a Lua function
/// that produces the data at draw time.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(deny_unknown_fields)]
pub enum ContentBody {
    /// A paragraph to render
    StaticParagraph { render: String },

    /// A Lua function that returns a paragraph to render
    DynamicParagraph { render: String },

    /// List to render
    StaticList { render: Vec<String> },

    /// A Lua function that returns lines to render
    DynamicList { render: String },

    /// A table to render
    StaticTable {
        // Column width constraints, resolved against the screen/layout size.
        widths: Vec<Constraint>,
        // Spacing between columns; rendering falls back to a default when None.
        col_spacing: Option<u16>,
        // Rows of cells, outer Vec = rows, inner Vec = columns.
        render: Vec<Vec<String>>,
    },

    /// A Lua function that returns a table to render
    DynamicTable {
        widths: Vec<Constraint>,
        col_spacing: Option<u16>,
        render: String,
    },
}
|
||||
|
||||
/// A cursed struct from crate::ui.
///
/// A custom panel definition: an optional title plus the content body to
/// render (see [`ContentBody`]).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(deny_unknown_fields)]
pub struct CustomContent {
    // Panel title; rendered padded with spaces when present.
    pub title: Option<String>,
    // What to draw inside the panel.
    pub body: ContentBody,
}
|
||||
|
||||
/// A cursed function from crate::ui.
///
/// Renders a [`CustomContent`] panel into the given frame area, dispatching
/// on the body variant. The `Dynamic*` variants call back into Lua (the
/// `render` field names a Lua function), passing a serialized
/// [`ContentRendererArg`] as the argument; any serialization or Lua error is
/// rendered as text in place of the content instead of aborting the draw.
pub fn draw_custom_content(
    ui: &mut UI,
    f: &mut Frame,
    layout_size: TuiRect,
    app: &app::App,
    content: CustomContent,
) {
    let config = app.config.general.panel_ui.default.clone();
    let title = content.title;
    let body = content.body;

    match body {
        ContentBody::StaticParagraph { render } => {
            let render = string_to_text(render);
            let content = Paragraph::new(render).block(block(
                config,
                // Pad the title with spaces so it doesn't touch the border.
                title.map(|t| format!(" {t} ")).unwrap_or_default(),
            ));
            f.render_widget(content, layout_size);
        }

        ContentBody::DynamicParagraph { render } => {
            // Context handed to the Lua renderer function.
            let ctx = ContentRendererArg {
                app: app.to_lua_ctx_light(),
                layout_size: layout_size.into(),
                screen_size: ui.screen_size.into(),
                scrolltop: ui.scrolltop as u16,
            };

            // Serialization or Lua failures become the paragraph text.
            let render = lua::serialize(ui.lua, &ctx)
                .map(|arg| {
                    lua::call(ui.lua, &render, arg).unwrap_or_else(|e| format!("{e:?}"))
                })
                .unwrap_or_else(|e| e.to_string());

            let render = string_to_text(render);

            let content = Paragraph::new(render).block(block(
                config,
                title.map(|t| format!(" {t} ")).unwrap_or_default(),
            ));
            f.render_widget(content, layout_size);
        }

        ContentBody::StaticList { render } => {
            let items = render
                .into_iter()
                .map(string_to_text)
                .map(ListItem::new)
                .collect::<Vec<ListItem>>();

            let content = List::new(items).block(block(
                config,
                title.map(|t| format!(" {t} ")).unwrap_or_default(),
            ));
            f.render_widget(content, layout_size);
        }

        ContentBody::DynamicList { render } => {
            let ctx = ContentRendererArg {
                app: app.to_lua_ctx_light(),
                layout_size: layout_size.into(),
                screen_size: ui.screen_size.into(),
                scrolltop: ui.scrolltop as u16,
            };

            // On error, render a one-item list containing the error text.
            let items = lua::serialize(ui.lua, &ctx)
                .map(|arg| {
                    lua::call(ui.lua, &render, arg)
                        .unwrap_or_else(|e| vec![format!("{e:?}")])
                })
                .unwrap_or_else(|e| vec![e.to_string()])
                .into_iter()
                .map(string_to_text)
                .map(ListItem::new)
                .collect::<Vec<ListItem>>();

            let content = List::new(items).block(block(
                config,
                title.map(|t| format!(" {t} ")).unwrap_or_default(),
            ));
            f.render_widget(content, layout_size);
        }

        ContentBody::StaticTable {
            widths,
            col_spacing,
            render,
        } => {
            // Rows of cells: outer Vec = rows, inner Vec = columns.
            let rows = render
                .into_iter()
                .map(|cols| {
                    Row::new(
                        cols.into_iter()
                            .map(string_to_text)
                            .map(Cell::from)
                            .collect::<Vec<Cell>>(),
                    )
                })
                .collect::<Vec<Row>>();

            // Resolve the declarative constraints against the current sizes.
            let widths = widths
                .into_iter()
                .map(|w| w.to_tui(ui.screen_size, layout_size))
                .collect::<Vec<TuiConstraint>>();

            let content = Table::new(rows, widths)
                .column_spacing(col_spacing.unwrap_or(1))
                .block(block(
                    config,
                    title.map(|t| format!(" {t} ")).unwrap_or_default(),
                ));

            f.render_widget(content, layout_size);
        }

        ContentBody::DynamicTable {
            widths,
            col_spacing,
            render,
        } => {
            let ctx = ContentRendererArg {
                app: app.to_lua_ctx_light(),
                layout_size: layout_size.into(),
                screen_size: ui.screen_size.into(),
                scrolltop: ui.scrolltop as u16,
            };

            // On error, render a single-cell table containing the error text.
            let rows = lua::serialize(ui.lua, &ctx)
                .map(|arg| {
                    lua::call(ui.lua, &render, arg)
                        .unwrap_or_else(|e| vec![vec![format!("{e:?}")]])
                })
                .unwrap_or_else(|e| vec![vec![e.to_string()]])
                .into_iter()
                .map(|cols| {
                    Row::new(
                        cols.into_iter()
                            .map(string_to_text)
                            .map(Cell::from)
                            .collect::<Vec<Cell>>(),
                    )
                })
                .collect::<Vec<Row>>();

            let widths = widths
                .into_iter()
                .map(|w| w.to_tui(ui.screen_size, layout_size))
                .collect::<Vec<TuiConstraint>>();

            // NOTE(review): unlike StaticTable this borrows `widths` and only
            // sets column_spacing when explicitly given (no `1` fallback) —
            // presumably relying on the widget's own default; confirm intent.
            let mut content = Table::new(rows, &widths).block(block(
                config,
                title.map(|t| format!(" {t} ")).unwrap_or_default(),
            ));

            if let Some(col_spacing) = col_spacing {
                content = content.column_spacing(col_spacing);
            };

            f.render_widget(content, layout_size);
        }
    }
}
|
@ -0,0 +1,26 @@
|
||||
use std::{env, path::PathBuf};
|
||||
|
||||
use lazy_static::lazy_static;
|
||||
use xdg::BaseDirectories;
|
||||
|
||||
// Cached XDG base directories; `None` when they cannot be determined.
lazy_static! {
    pub static ref BASE_DIRS: Option<BaseDirectories> = BaseDirectories::new().ok();
}
|
||||
|
||||
/// The current user's home directory, if it can be determined.
pub fn home_dir() -> Option<PathBuf> {
    home::home_dir()
}
|
||||
|
||||
/// The XDG config home (e.g. `~/.config`), if base directories are available.
pub fn config_dir() -> Option<PathBuf> {
    BASE_DIRS.as_ref().map(|base| base.get_config_home())
}
|
||||
|
||||
pub fn runtime_dir() -> PathBuf {
|
||||
let Some(dir) = BASE_DIRS
|
||||
.as_ref()
|
||||
.and_then(|base| base.get_runtime_directory().ok())
|
||||
else {
|
||||
return env::temp_dir();
|
||||
};
|
||||
dir.clone()
|
||||
}
|
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,499 @@
|
||||
use crate::dirs;
|
||||
use anyhow::{bail, Result};
|
||||
use lazy_static::lazy_static;
|
||||
use serde::{Deserialize, Serialize};
|
||||
pub use snailquote::escape;
|
||||
use std::path::{Component, Path, PathBuf};
|
||||
|
||||
lazy_static! {
|
||||
pub static ref HOME: Option<PathBuf> = dirs::home_dir();
|
||||
}
|
||||
|
||||
// Stolen from https://github.com/Manishearth/pathdiff/blob/master/src/lib.rs
/// Computes the path of `path` relative to `base`.
///
/// Walks both paths component by component: shared leading components cancel
/// out, each remaining `base` component becomes a `..`, and the rest of
/// `path` is appended.
///
/// # Errors
///
/// Fails when `path` is relative while `base` is absolute, or when `base`
/// contains a `..` component that cannot be resolved against `path`.
pub fn diff<P, B>(path: P, base: B) -> Result<PathBuf>
where
    P: AsRef<Path>,
    B: AsRef<Path>,
{
    let path = path.as_ref();
    let base = base.as_ref();

    if path.is_absolute() != base.is_absolute() {
        if path.is_absolute() {
            // Absolute path against a relative base: return the path as-is.
            Ok(PathBuf::from(path))
        } else {
            let path = path.to_string_lossy();
            bail!("{path}: is not absolute")
        }
    } else {
        let mut ita = path.components();
        let mut itb = base.components();
        let mut comps: Vec<Component> = vec![];
        loop {
            match (ita.next(), itb.next()) {
                // Both exhausted: done.
                (None, None) => break,
                // Base exhausted: append the remainder of `path`.
                (Some(a), None) => {
                    comps.push(a);
                    comps.extend(ita.by_ref());
                    break;
                }
                // Path exhausted: climb up once per remaining base component.
                (None, _) => comps.push(Component::ParentDir),
                // Identical leading components cancel (only while nothing
                // has been emitted yet).
                (Some(a), Some(b)) if comps.is_empty() && a == b => (),
                // `.` in base is a no-op; keep the path component.
                (Some(a), Some(Component::CurDir)) => comps.push(a),
                // `..` in base cannot be resolved without its parent.
                (Some(_), Some(Component::ParentDir)) => {
                    let path = path.to_string_lossy();
                    let base = base.to_string_lossy();
                    bail!("{base} is not a parent of {path}")
                }
                // Divergence: climb out of the rest of base, then descend
                // into the rest of path.
                (Some(a), Some(_)) => {
                    comps.push(Component::ParentDir);
                    for _ in itb {
                        comps.push(Component::ParentDir);
                    }
                    comps.push(a);
                    comps.extend(ita.by_ref());
                    break;
                }
            }
        }
        Ok(comps.iter().map(|c| c.as_os_str()).collect())
    }
}
|
||||
|
||||
/// Options controlling how [`relative_to`] formats its result.
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct RelativityConfig<B: AsRef<Path>> {
    // Base to compute relativity against; falls back to the current
    // working directory when unset.
    base: Option<B>,
    // Prefix the result with `./` when it doesn't already start with dots.
    with_prefix_dots: Option<bool>,
    // Replace a trailing `.`/`..` with an explicit basename.
    without_suffix_dots: Option<bool>,
}
|
||||
|
||||
impl<B: AsRef<Path>> RelativityConfig<B> {
    /// Sets the base path to compute relativity against.
    pub fn with_base(mut self, base: B) -> Self {
        self.base = Some(base);
        self
    }

    /// Enables prefixing results with `./` (see [`RelativityConfig`]).
    pub fn with_prefix_dots(mut self) -> Self {
        self.with_prefix_dots = Some(true);
        self
    }

    /// Enables replacing a trailing `.`/`..` with an explicit basename.
    pub fn without_suffix_dots(mut self) -> Self {
        self.without_suffix_dots = Some(true);
        self
    }
}
|
||||
|
||||
/// Converts `path` into a path relative to the configured base (or to the
/// current working directory when no base is given), then applies the
/// formatting options from `config`:
///
/// - `with_prefix_dots`: prefix with `./` unless the result already starts
///   with `.` or `..`.
/// - `without_suffix_dots`: rewrite a trailing `.`/`..` so the basename
///   appears explicitly (e.g. `../name` instead of `.`).
///
/// # Errors
///
/// Fails when the current directory cannot be determined or when the
/// relative path cannot be computed (see [`diff`]).
pub fn relative_to<P, B>(
    path: P,
    config: Option<&RelativityConfig<B>>,
) -> Result<PathBuf>
where
    P: AsRef<Path>,
    B: AsRef<Path>,
{
    let path = path.as_ref();
    let base = match config.and_then(|c| c.base.as_ref()) {
        Some(base) => PathBuf::from(base.as_ref()),
        None => std::env::current_dir()?,
    };

    let diff = diff(path, base)?;

    // An empty diff means `path` equals the base; represent it as `.`.
    let relative = if diff.to_str() == Some("") {
        ".".into()
    } else {
        diff
    };

    // Optionally make the result explicitly relative (`./foo`).
    let relative = if config.and_then(|c| c.with_prefix_dots).unwrap_or(false)
        && !relative.starts_with(".")
        && !relative.starts_with("..")
    {
        PathBuf::from(".").join(relative)
    } else {
        relative
    };

    // Optionally replace a trailing `.`/`..` with an explicit basename.
    let relative = if !config.and_then(|c| c.without_suffix_dots).unwrap_or(false) {
        relative
    } else if relative.ends_with(".") {
        // `path` is the base itself: express it via its parent (`../name`).
        match (path.parent(), path.file_name()) {
            (Some(_), Some(name)) => PathBuf::from("..").join(name),
            (_, _) => relative,
        }
    } else if relative.ends_with("..") {
        match (path.parent(), path.file_name()) {
            (Some(parent), Some(name)) => {
                if parent.parent().is_some() {
                    relative.join("..").join(name)
                } else {
                    // always prefer absolute path if it's a child of the root directory
                    // to guarantee that the basename is included
                    path.into()
                }
            }
            (_, _) => relative,
        }
    } else {
        relative
    };

    Ok(relative)
}
|
||||
|
||||
pub fn shorten<P, B>(path: P, config: Option<&RelativityConfig<B>>) -> Result<String>
|
||||
where
|
||||
P: AsRef<Path>,
|
||||
B: AsRef<Path>,
|
||||
{
|
||||
let path = path.as_ref();
|
||||
let pathstring = path.to_string_lossy().to_string();
|
||||
let relative = relative_to(path, config)?;
|
||||
|
||||
let relative = relative.to_string_lossy().to_string();
|
||||
|
||||
let fromhome = HOME
|
||||
.as_ref()
|
||||
.and_then(|h| {
|
||||
path.strip_prefix(h).ok().map(|p| {
|
||||
if p.to_str() == Some("") {
|
||||
"~".into()
|
||||
} else {
|
||||
PathBuf::from("~").join(p).to_string_lossy().to_string()
|
||||
}
|
||||
})
|
||||
})
|
||||
.unwrap_or(pathstring);
|
||||
|
||||
if relative.len() < fromhome.len() {
|
||||
Ok(relative)
|
||||
} else {
|
||||
Ok(fromhome)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
|
||||
use super::*;
|
||||
|
||||
type Config<'a> = Option<&'a RelativityConfig<String>>;
|
||||
|
||||
const NONE: Config = Config::None;
|
||||
|
||||
fn default<'a>() -> RelativityConfig<&'a str> {
|
||||
Default::default()
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_relative_to_pwd() {
|
||||
let path = std::env::current_dir().unwrap();
|
||||
|
||||
let relative = relative_to(&path, NONE).unwrap();
|
||||
assert_eq!(relative, PathBuf::from("."));
|
||||
|
||||
let relative = relative_to(&path, Some(&default().with_prefix_dots())).unwrap();
|
||||
assert_eq!(relative, PathBuf::from("."));
|
||||
|
||||
let relative =
|
||||
relative_to(&path, Some(&default().without_suffix_dots())).unwrap();
|
||||
assert_eq!(
|
||||
relative,
|
||||
PathBuf::from("..").join(path.file_name().unwrap())
|
||||
);
|
||||
|
||||
let relative = relative_to(
|
||||
&path,
|
||||
Some(&default().with_prefix_dots().without_suffix_dots()),
|
||||
)
|
||||
.unwrap();
|
||||
assert_eq!(
|
||||
relative,
|
||||
PathBuf::from("..").join(path.file_name().unwrap())
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_relative_to_parent() {
|
||||
let path = std::env::current_dir().unwrap().join("docs");
|
||||
let parent = path.parent().unwrap();
|
||||
|
||||
let base = default().with_base(path.to_str().unwrap());
|
||||
|
||||
let relative = relative_to(parent, Some(&base)).unwrap();
|
||||
assert_eq!(relative, PathBuf::from(".."));
|
||||
|
||||
let relative =
|
||||
relative_to(parent, Some(&base.clone().with_prefix_dots())).unwrap();
|
||||
assert_eq!(relative, PathBuf::from(".."));
|
||||
|
||||
let relative =
|
||||
relative_to(parent, Some(&base.clone().without_suffix_dots())).unwrap();
|
||||
assert_eq!(
|
||||
relative,
|
||||
PathBuf::from("../..").join(parent.file_name().unwrap())
|
||||
);
|
||||
|
||||
let relative = relative_to(
|
||||
parent,
|
||||
Some(&base.clone().with_prefix_dots().without_suffix_dots()),
|
||||
)
|
||||
.unwrap();
|
||||
assert_eq!(
|
||||
relative,
|
||||
PathBuf::from("../..").join(parent.file_name().unwrap())
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_relative_to_file() {
|
||||
let path = std::env::current_dir().unwrap().join("foo").join("bar");
|
||||
|
||||
let relative = relative_to(&path, NONE).unwrap();
|
||||
assert_eq!(relative, PathBuf::from("foo/bar"));
|
||||
|
||||
let relative = relative_to(&path, Some(&default().with_prefix_dots())).unwrap();
|
||||
assert_eq!(relative, PathBuf::from("./foo/bar"));
|
||||
|
||||
let relative = relative_to(
|
||||
&path,
|
||||
Some(&default().with_prefix_dots().without_suffix_dots()),
|
||||
)
|
||||
.unwrap();
|
||||
assert_eq!(relative, PathBuf::from("./foo/bar"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_relative_to_root() {
|
||||
let relative = relative_to("/foo", Some(&default().with_base("/"))).unwrap();
|
||||
assert_eq!(relative, PathBuf::from("foo"));
|
||||
|
||||
let relative = relative_to(
|
||||
"/foo",
|
||||
Some(
|
||||
&default()
|
||||
.with_base("/")
|
||||
.with_prefix_dots()
|
||||
.without_suffix_dots(),
|
||||
),
|
||||
)
|
||||
.unwrap();
|
||||
assert_eq!(relative, PathBuf::from("./foo"));
|
||||
|
||||
let relative = relative_to("/", Some(&default().with_base("/"))).unwrap();
|
||||
assert_eq!(relative, PathBuf::from("."));
|
||||
|
||||
let relative = relative_to(
|
||||
"/",
|
||||
Some(
|
||||
&default()
|
||||
.with_base("/")
|
||||
.with_prefix_dots()
|
||||
.without_suffix_dots(),
|
||||
),
|
||||
)
|
||||
.unwrap();
|
||||
assert_eq!(relative, PathBuf::from("."));
|
||||
|
||||
let relative = relative_to("/", Some(&default().with_base("/foo"))).unwrap();
|
||||
assert_eq!(relative, PathBuf::from(".."));
|
||||
|
||||
let relative = relative_to(
|
||||
"/",
|
||||
Some(
|
||||
&default()
|
||||
.with_base("/foo")
|
||||
.with_prefix_dots()
|
||||
.without_suffix_dots(),
|
||||
),
|
||||
)
|
||||
.unwrap();
|
||||
assert_eq!(relative, PathBuf::from(".."));
|
||||
}
|
||||
|
||||
#[test]
fn test_relative_to_base() {
    // Divergent trees: climb out of the base, then descend into the target.
    let path = "/some/directory";
    let base = "/another/foo/bar";

    let config = default().with_base(base);
    assert_eq!(
        relative_to(path, Some(&config)).unwrap(),
        PathBuf::from("../../../some/directory")
    );

    // Prefix/suffix dot options leave a path that already starts
    // with ".." untouched.
    let config = default()
        .with_base(base)
        .with_prefix_dots()
        .without_suffix_dots();
    assert_eq!(
        relative_to(path, Some(&config)).unwrap(),
        PathBuf::from("../../../some/directory")
    );
}
|
||||
|
||||
#[test]
fn test_shorten_home() {
    // `shorten` should abbreviate the home directory to "~".
    let path = HOME.as_ref().unwrap();

    let res = shorten(path, NONE).unwrap();
    assert_eq!(res, "~");

    // Dot options shouldn't affect the bare home directory.
    // (The original repeated this exact call+assert twice — the verbatim
    // copy-paste duplicate has been removed.)
    let res = shorten(
        path,
        Some(&default().with_prefix_dots().without_suffix_dots()),
    )
    .unwrap();
    assert_eq!(res, "~");

    // Paths under home keep the "~/" prefix.
    let res = shorten(path.join("foo"), NONE).unwrap();
    assert_eq!(res, "~/foo");

    let res = shorten(
        path.join("foo"),
        Some(&default().with_prefix_dots().without_suffix_dots()),
    )
    .unwrap();
    assert_eq!(res, "~/foo");

    // A sibling path that merely starts with the home directory string
    // (e.g. "/home/userfoo") must NOT be abbreviated.
    let res = shorten(format!("{}foo", path.to_string_lossy()), NONE).unwrap();
    assert_ne!(res, "~/foo");
    assert_eq!(res, format!("{}foo", path.to_string_lossy()));
}
|
||||
|
||||
#[test]
fn test_shorten_base() {
    // Shared ancestor "/present": climb two levels, descend into target.
    let path = "/present/working/directory";
    let base = "/present/foo/bar";

    let config = default().with_base(base);
    assert_eq!(
        shorten(path, Some(&config)).unwrap(),
        "../../working/directory"
    );

    // Dot options don't alter a path already starting with "..".
    let config = default()
        .with_base(base)
        .with_prefix_dots()
        .without_suffix_dots();
    assert_eq!(
        shorten(path, Some(&config)).unwrap(),
        "../../working/directory"
    );
}
|
||||
|
||||
#[test]
fn test_shorten_pwd() {
    let path = "/present/working/directory";

    // A path identical to the base shortens to ".".
    let config = default().with_base(path);
    assert_eq!(shorten(path, Some(&config)).unwrap(), ".");

    // With suffix dots disabled, the trailing "." is replaced by the
    // "../<last-component>" spelling instead.
    let config = default()
        .with_base(path)
        .with_prefix_dots()
        .without_suffix_dots();
    assert_eq!(shorten(path, Some(&config)).unwrap(), "../directory");
}
|
||||
|
||||
#[test]
fn test_shorten_parent() {
    // Target is the immediate parent of the base.
    let path = "/present/working";
    let base = "/present/working/directory";

    let config = default().with_base(base);
    assert_eq!(shorten(path, Some(&config)).unwrap(), "..");

    // With suffix dots disabled, the bare ".." is expanded into the
    // "../../<component>" spelling.
    let config = default()
        .with_base(base)
        .with_prefix_dots()
        .without_suffix_dots();
    assert_eq!(shorten(path, Some(&config)).unwrap(), "../../working");
}
|
||||
|
||||
#[test]
fn test_shorten_root() {
    // Root relative to itself stays "/".
    let config = default().with_base("/");
    assert_eq!(shorten("/", Some(&config)).unwrap(), "/");

    // Dot options don't change the root-to-root case.
    let config = default().with_base("/").with_prefix_dots().without_suffix_dots();
    assert_eq!(shorten("/", Some(&config)).unwrap(), "/");

    // A child of root shortens to its bare name...
    let config = default().with_base("/");
    assert_eq!(shorten("/foo", Some(&config)).unwrap(), "foo");

    // ...but with dot options enabled the absolute form is kept.
    let config = default().with_base("/").with_prefix_dots().without_suffix_dots();
    assert_eq!(shorten("/foo", Some(&config)).unwrap(), "/foo");

    // Root viewed from a non-root base is still rendered as "/".
    let config = default()
        .with_base("/foo")
        .with_prefix_dots()
        .without_suffix_dots();
    assert_eq!(shorten("/", Some(&config)).unwrap(), "/");
}
|
||||
|
||||
#[test]
fn test_path_escape() {
    // (input, expected) pairs covering the quoting strategies:
    // no quoting, single quotes, and double quotes with backslash escapes.
    let cases = [
        ("foo", "foo"),
        ("foo bar", "'foo bar'"),
        ("foo\nbar", "\"foo\\nbar\""),
        ("foo$bar", "'foo$bar'"),
        ("foo'$\n'bar", "\"foo'\\$\\n'bar\""),
        ("a'b\"c", "\"a'b\\\"c\""),
    ];

    for (input, expected) in cases {
        let text = input.to_string();
        assert_eq!(escape(&text), expected);
    }
}
|
||||
}
|
@ -0,0 +1,50 @@
|
||||
use lazy_static::lazy_static;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use skim::prelude::{ExactOrFuzzyEngineFactory, RegexEngineFactory};
|
||||
use skim::{MatchEngine, MatchEngineFactory, SkimItem};
|
||||
|
||||
// Lazily-initialized, process-wide skim engine factories, built once on
// first use and shared by every search invocation.
lazy_static! {
    // Factory for exact-or-fuzzy matching engines.
    static ref FUZZY_FACTORY: ExactOrFuzzyEngineFactory =
        ExactOrFuzzyEngineFactory::builder().build();
    // Factory for regular-expression matching engines.
    static ref REGEX_FACTORY: RegexEngineFactory = RegexEngineFactory::builder().build();
}
|
||||
|
||||
/// A single searchable item wrapping a path rendered as text.
///
/// Implements `SkimItem` (below) so it can be fed to the skim matcher.
pub struct PathItem {
    // The path string that skim matches against.
    path: String,
}
|
||||
|
||||
impl From<String> for PathItem {
|
||||
fn from(value: String) -> Self {
|
||||
Self { path: value }
|
||||
}
|
||||
}
|
||||
|
||||
impl SkimItem for PathItem {
    /// Exposes the stored path as the text skim matches against.
    /// Borrows the field, so no allocation happens per match.
    fn text(&self) -> std::borrow::Cow<str> {
        self.path.as_str().into()
    }
}
|
||||
|
||||
/// The matching strategy used when filtering items during search.
#[derive(Debug, Copy, Clone, Default, Eq, PartialEq, Hash, Serialize, Deserialize)]
#[serde(deny_unknown_fields)] // NOTE(review): deny_unknown_fields is documented for structs/struct variants; confirm it has the intended effect on this unit-variant enum
pub enum SearchAlgorithm {
    /// Exact-or-fuzzy matching (the default).
    #[default]
    Fuzzy,
    /// Regular-expression matching.
    Regex,
}
|
||||
|
||||
impl SearchAlgorithm {
|
||||
pub fn toggle(self) -> Self {
|
||||
match self {
|
||||
Self::Fuzzy => Self::Regex,
|
||||
Self::Regex => Self::Fuzzy,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn engine(&self, pattern: &str) -> Box<dyn MatchEngine> {
|
||||
match self {
|
||||
Self::Fuzzy => FUZZY_FACTORY.create_engine(pattern),
|
||||
Self::Regex => REGEX_FACTORY.create_engine(pattern),
|
||||
}
|
||||
}
|
||||
}
|
Loading…
Reference in New Issue