mirror of https://github.com/chipsenkbeil/distant
Refactor into protocol crate & change capabilities -> version (#189)
parent
95c0d0c0d1
commit
76dc7cf1fa
@ -1,572 +0,0 @@
|
|||||||
use std::io;
|
|
||||||
use std::path::PathBuf;
|
|
||||||
|
|
||||||
use derive_more::{From, IsVariant};
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use strum::{AsRefStr, EnumDiscriminants, EnumIter, EnumMessage, EnumString};
|
|
||||||
|
|
||||||
mod capabilities;
|
|
||||||
pub use capabilities::*;
|
|
||||||
|
|
||||||
mod change;
|
|
||||||
pub use change::*;
|
|
||||||
|
|
||||||
mod cmd;
|
|
||||||
pub use cmd::*;
|
|
||||||
|
|
||||||
mod error;
|
|
||||||
pub use error::*;
|
|
||||||
|
|
||||||
mod filesystem;
|
|
||||||
pub use filesystem::*;
|
|
||||||
|
|
||||||
mod metadata;
|
|
||||||
pub use metadata::*;
|
|
||||||
|
|
||||||
mod permissions;
|
|
||||||
pub use permissions::*;
|
|
||||||
|
|
||||||
mod pty;
|
|
||||||
pub use pty::*;
|
|
||||||
|
|
||||||
mod search;
|
|
||||||
pub use search::*;
|
|
||||||
|
|
||||||
mod system;
|
|
||||||
pub use system::*;
|
|
||||||
|
|
||||||
mod utils;
|
|
||||||
pub(crate) use utils::*;
|
|
||||||
|
|
||||||
/// Id for a remote process
///
/// A `u32` chosen to match typical OS pid width; assigned by the server
/// when a process is spawned and echoed back in process-related responses.
pub type ProcessId = u32;

/// Mapping of environment variables
///
/// NOTE(review): backed by `distant_net::common::Map` — presumably a
/// string-to-string map; confirm semantics (ordering, duplicate handling)
/// against the `distant_net` crate.
pub type Environment = distant_net::common::Map;
|
|
||||||
|
|
||||||
/// Represents a wrapper around a distant message, supporting single and batch requests
///
/// `#[serde(untagged)]` means the wire format is either a bare payload or a
/// JSON array of payloads — deserialization picks whichever shape matches.
#[derive(Clone, Debug, From, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[serde(untagged)]
pub enum Msg<T> {
    /// A single message payload
    Single(T),

    /// An ordered collection of message payloads
    Batch(Vec<T>),
}
|
|
||||||
|
|
||||||
impl<T> Msg<T> {
|
|
||||||
/// Returns true if msg has a single payload
|
|
||||||
pub fn is_single(&self) -> bool {
|
|
||||||
matches!(self, Self::Single(_))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns reference to single value if msg is single variant
|
|
||||||
pub fn as_single(&self) -> Option<&T> {
|
|
||||||
match self {
|
|
||||||
Self::Single(x) => Some(x),
|
|
||||||
_ => None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns mutable reference to single value if msg is single variant
|
|
||||||
pub fn as_mut_single(&mut self) -> Option<&T> {
|
|
||||||
match self {
|
|
||||||
Self::Single(x) => Some(x),
|
|
||||||
_ => None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns the single value if msg is single variant
|
|
||||||
pub fn into_single(self) -> Option<T> {
|
|
||||||
match self {
|
|
||||||
Self::Single(x) => Some(x),
|
|
||||||
_ => None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns true if msg has a batch of payloads
|
|
||||||
pub fn is_batch(&self) -> bool {
|
|
||||||
matches!(self, Self::Batch(_))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns reference to batch value if msg is batch variant
|
|
||||||
pub fn as_batch(&self) -> Option<&[T]> {
|
|
||||||
match self {
|
|
||||||
Self::Batch(x) => Some(x),
|
|
||||||
_ => None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns mutable reference to batch value if msg is batch variant
|
|
||||||
pub fn as_mut_batch(&mut self) -> Option<&mut [T]> {
|
|
||||||
match self {
|
|
||||||
Self::Batch(x) => Some(x),
|
|
||||||
_ => None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns the batch value if msg is batch variant
|
|
||||||
pub fn into_batch(self) -> Option<Vec<T>> {
|
|
||||||
match self {
|
|
||||||
Self::Batch(x) => Some(x),
|
|
||||||
_ => None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Convert into a collection of payload data
|
|
||||||
pub fn into_vec(self) -> Vec<T> {
|
|
||||||
match self {
|
|
||||||
Self::Single(x) => vec![x],
|
|
||||||
Self::Batch(x) => x,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(feature = "schemars")]
impl<T: schemars::JsonSchema> Msg<T> {
    /// Produces the JSON schema describing [`Msg<T>`] (schemars feature only)
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(Msg<T>)
    }
}
|
|
||||||
|
|
||||||
/// Represents the payload of a request to be performed on the remote machine
///
/// Serialized with an internal snake_case `type` tag, e.g.
/// `{"type": "file_read", "path": "..."}`. The strum discriminants generated
/// from this enum are named `CapabilityKind`, so every request variant doubles
/// as a server capability; variant additions/removals change that set.
#[derive(Clone, Debug, PartialEq, Eq, EnumDiscriminants, IsVariant, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[strum_discriminants(derive(
    AsRefStr,
    strum::Display,
    EnumIter,
    EnumMessage,
    EnumString,
    Hash,
    PartialOrd,
    Ord,
    IsVariant,
    Serialize,
    Deserialize
))]
#[cfg_attr(
    feature = "schemars",
    strum_discriminants(derive(schemars::JsonSchema))
)]
#[strum_discriminants(name(CapabilityKind))]
#[strum_discriminants(strum(serialize_all = "snake_case"))]
#[serde(rename_all = "snake_case", deny_unknown_fields, tag = "type")]
pub enum Request {
    /// Retrieve information about the server's capabilities
    #[strum_discriminants(strum(message = "Supports retrieving capabilities"))]
    Capabilities {},

    /// Reads a file from the specified path on the remote machine
    #[strum_discriminants(strum(message = "Supports reading binary file"))]
    FileRead {
        /// The path to the file on the remote machine
        path: PathBuf,
    },

    /// Reads a file from the specified path on the remote machine
    /// and treats the contents as text
    #[strum_discriminants(strum(message = "Supports reading text file"))]
    FileReadText {
        /// The path to the file on the remote machine
        path: PathBuf,
    },

    /// Writes a file, creating it if it does not exist, and overwriting any existing content
    /// on the remote machine
    #[strum_discriminants(strum(message = "Supports writing binary file"))]
    FileWrite {
        /// The path to the file on the remote machine
        path: PathBuf,

        /// Data for server-side writing of content
        #[serde(with = "serde_bytes")]
        #[cfg_attr(feature = "schemars", schemars(with = "Vec<u8>"))]
        data: Vec<u8>,
    },

    /// Writes a file using text instead of bytes, creating it if it does not exist,
    /// and overwriting any existing content on the remote machine
    #[strum_discriminants(strum(message = "Supports writing text file"))]
    FileWriteText {
        /// The path to the file on the remote machine
        path: PathBuf,

        /// Data for server-side writing of content
        text: String,
    },

    /// Appends to a file, creating it if it does not exist, on the remote machine
    #[strum_discriminants(strum(message = "Supports appending to binary file"))]
    FileAppend {
        /// The path to the file on the remote machine
        path: PathBuf,

        /// Data for server-side writing of content
        #[serde(with = "serde_bytes")]
        #[cfg_attr(feature = "schemars", schemars(with = "Vec<u8>"))]
        data: Vec<u8>,
    },

    /// Appends text to a file, creating it if it does not exist, on the remote machine
    #[strum_discriminants(strum(message = "Supports appending to text file"))]
    FileAppendText {
        /// The path to the file on the remote machine
        path: PathBuf,

        /// Data for server-side writing of content
        text: String,
    },

    /// Reads a directory from the specified path on the remote machine
    #[strum_discriminants(strum(message = "Supports reading directory"))]
    DirRead {
        /// The path to the directory on the remote machine
        path: PathBuf,

        /// Maximum depth to traverse with 0 indicating there is no maximum
        /// depth and 1 indicating the most immediate children within the
        /// directory
        #[serde(default = "one")]
        depth: usize,

        /// Whether or not to return absolute or relative paths
        #[serde(default)]
        absolute: bool,

        /// Whether or not to canonicalize the resulting paths, meaning
        /// returning the canonical, absolute form of a path with all
        /// intermediate components normalized and symbolic links resolved
        ///
        /// Note that the flag absolute must be true to have absolute paths
        /// returned, even if canonicalize is flagged as true
        #[serde(default)]
        canonicalize: bool,

        /// Whether or not to include the root directory in the retrieved
        /// entries
        ///
        /// If included, the root directory will also be a canonicalized,
        /// absolute path and will not follow any of the other flags
        #[serde(default)]
        include_root: bool,
    },

    /// Creates a directory on the remote machine
    #[strum_discriminants(strum(message = "Supports creating directory"))]
    DirCreate {
        /// The path to the directory on the remote machine
        path: PathBuf,

        /// Whether or not to create all parent directories
        #[serde(default)]
        all: bool,
    },

    /// Removes a file or directory on the remote machine
    #[strum_discriminants(strum(message = "Supports removing files, directories, and symlinks"))]
    Remove {
        /// The path to the file or directory on the remote machine
        path: PathBuf,

        /// Whether or not to remove all contents within directory if is a directory.
        /// Does nothing different for files
        #[serde(default)]
        force: bool,
    },

    /// Copies a file or directory on the remote machine
    #[strum_discriminants(strum(message = "Supports copying files, directories, and symlinks"))]
    Copy {
        /// The path to the file or directory on the remote machine
        src: PathBuf,

        /// New location on the remote machine for copy of file or directory
        dst: PathBuf,
    },

    /// Moves/renames a file or directory on the remote machine
    #[strum_discriminants(strum(message = "Supports renaming files, directories, and symlinks"))]
    Rename {
        /// The path to the file or directory on the remote machine
        src: PathBuf,

        /// New location on the remote machine for the file or directory
        dst: PathBuf,
    },

    /// Watches a path for changes
    #[strum_discriminants(strum(message = "Supports watching filesystem for changes"))]
    Watch {
        /// The path to the file, directory, or symlink on the remote machine
        path: PathBuf,

        /// If true, will recursively watch for changes within directories, othewise
        /// will only watch for changes immediately within directories
        #[serde(default)]
        recursive: bool,

        /// Filter to only report back specified changes
        #[serde(default)]
        only: Vec<ChangeKind>,

        /// Filter to report back changes except these specified changes
        #[serde(default)]
        except: Vec<ChangeKind>,
    },

    /// Unwatches a path for changes, meaning no additional changes will be reported
    #[strum_discriminants(strum(message = "Supports unwatching filesystem for changes"))]
    Unwatch {
        /// The path to the file, directory, or symlink on the remote machine
        path: PathBuf,
    },

    /// Checks whether the given path exists
    #[strum_discriminants(strum(message = "Supports checking if a path exists"))]
    Exists {
        /// The path to the file or directory on the remote machine
        path: PathBuf,
    },

    /// Retrieves filesystem metadata for the specified path on the remote machine
    #[strum_discriminants(strum(
        message = "Supports retrieving metadata about a file, directory, or symlink"
    ))]
    Metadata {
        /// The path to the file, directory, or symlink on the remote machine
        path: PathBuf,

        /// Whether or not to include a canonicalized version of the path, meaning
        /// returning the canonical, absolute form of a path with all
        /// intermediate components normalized and symbolic links resolved
        #[serde(default)]
        canonicalize: bool,

        /// Whether or not to follow symlinks to determine absolute file type (dir/file)
        #[serde(default)]
        resolve_file_type: bool,
    },

    /// Sets permissions on a file, directory, or symlink on the remote machine
    #[strum_discriminants(strum(
        message = "Supports setting permissions on a file, directory, or symlink"
    ))]
    SetPermissions {
        /// The path to the file, directory, or symlink on the remote machine
        path: PathBuf,

        /// New permissions to apply to the file, directory, or symlink
        permissions: Permissions,

        /// Additional options to supply when setting permissions
        #[serde(default)]
        options: SetPermissionsOptions,
    },

    /// Searches filesystem using the provided query
    #[strum_discriminants(strum(message = "Supports searching filesystem using queries"))]
    Search {
        /// Query to perform against the filesystem
        query: SearchQuery,
    },

    /// Cancels an active search being run against the filesystem
    #[strum_discriminants(strum(
        message = "Supports canceling an active search against the filesystem"
    ))]
    CancelSearch {
        /// Id of the search to cancel
        id: SearchId,
    },

    /// Spawns a new process on the remote machine
    #[strum_discriminants(strum(message = "Supports spawning a process"))]
    ProcSpawn {
        /// The full command to run including arguments
        cmd: Cmd,

        /// Environment to provide to the remote process
        #[serde(default)]
        environment: Environment,

        /// Alternative current directory for the remote process
        #[serde(default)]
        current_dir: Option<PathBuf>,

        /// If provided, will spawn process in a pty, otherwise spawns directly
        #[serde(default)]
        pty: Option<PtySize>,
    },

    /// Kills a process running on the remote machine
    #[strum_discriminants(strum(message = "Supports killing a spawned process"))]
    ProcKill {
        /// Id of the actively-running process
        id: ProcessId,
    },

    /// Sends additional data to stdin of running process
    #[strum_discriminants(strum(message = "Supports sending stdin to a spawned process"))]
    ProcStdin {
        /// Id of the actively-running process to send stdin data
        id: ProcessId,

        /// Data to send to a process's stdin pipe
        #[serde(with = "serde_bytes")]
        #[cfg_attr(feature = "schemars", schemars(with = "Vec<u8>"))]
        data: Vec<u8>,
    },

    /// Resize pty of remote process
    #[strum_discriminants(strum(message = "Supports resizing the pty of a spawned process"))]
    ProcResizePty {
        /// Id of the actively-running process whose pty to resize
        id: ProcessId,

        /// The new pty dimensions
        size: PtySize,
    },

    /// Retrieve information about the server and the system it is on
    #[strum_discriminants(strum(message = "Supports retrieving system information"))]
    SystemInfo {},
}
|
|
||||||
|
|
||||||
#[cfg(feature = "schemars")]
impl Request {
    /// Produces the JSON schema describing [`Request`] (schemars feature only)
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(Request)
    }
}
|
|
||||||
|
|
||||||
/// Represents the payload of a successful response
///
/// Serialized with an internal snake_case `type` tag, mirroring [`Request`].
#[derive(Clone, Debug, PartialEq, Eq, AsRefStr, IsVariant, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[serde(rename_all = "snake_case", deny_unknown_fields, tag = "type")]
#[strum(serialize_all = "snake_case")]
pub enum Response {
    /// General okay with no extra data, returned in cases like
    /// creating or removing a directory, copying a file, or renaming
    /// a file
    Ok,

    /// General-purpose failure that occurred from some request
    Error(Error),

    /// Response containing some arbitrary, binary data
    Blob {
        /// Binary data associated with the response
        #[serde(with = "serde_bytes")]
        #[cfg_attr(feature = "schemars", schemars(with = "Vec<u8>"))]
        data: Vec<u8>,
    },

    /// Response containing some arbitrary, text data
    Text {
        /// Text data associated with the response
        data: String,
    },

    /// Response to reading a directory
    DirEntries {
        /// Entries contained within the requested directory
        entries: Vec<DirEntry>,

        /// Errors encountered while scanning for entries
        errors: Vec<Error>,
    },

    /// Response to a filesystem change for some watched file, directory, or symlink
    Changed(Change),

    /// Response to checking if a path exists
    Exists { value: bool },

    /// Represents metadata about some filesystem object (file, directory, symlink) on remote machine
    Metadata(Metadata),

    /// Represents a search being started
    SearchStarted {
        /// Arbitrary id associated with search
        id: SearchId,
    },

    /// Represents some subset of results for a search query (may not be all of them)
    SearchResults {
        /// Arbitrary id associated with search
        id: SearchId,

        /// Collection of matches from performing a query
        matches: Vec<SearchQueryMatch>,
    },

    /// Represents a search being completed
    SearchDone {
        /// Arbitrary id associated with search
        id: SearchId,
    },

    /// Response to starting a new process
    ProcSpawned {
        /// Arbitrary id associated with running process
        id: ProcessId,
    },

    /// Actively-transmitted stdout as part of running process
    ProcStdout {
        /// Arbitrary id associated with running process
        id: ProcessId,

        /// Data read from a process' stdout pipe
        #[serde(with = "serde_bytes")]
        #[cfg_attr(feature = "schemars", schemars(with = "Vec<u8>"))]
        data: Vec<u8>,
    },

    /// Actively-transmitted stderr as part of running process
    ProcStderr {
        /// Arbitrary id associated with running process
        id: ProcessId,

        /// Data read from a process' stderr pipe
        #[serde(with = "serde_bytes")]
        #[cfg_attr(feature = "schemars", schemars(with = "Vec<u8>"))]
        data: Vec<u8>,
    },

    /// Response to a process finishing
    ProcDone {
        /// Arbitrary id associated with running process
        id: ProcessId,

        /// Whether or not termination was successful
        success: bool,

        /// Exit code associated with termination, will be missing if terminated by signal
        code: Option<i32>,
    },

    /// Response to retrieving information about the server and the system it is on
    SystemInfo(SystemInfo),

    /// Response to retrieving information about the server's capabilities
    Capabilities { supported: Capabilities },
}
|
|
||||||
|
|
||||||
#[cfg(feature = "schemars")]
impl Response {
    /// Produces the JSON schema describing [`Response`] (schemars feature only)
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(Response)
    }
}
|
|
||||||
|
|
||||||
impl From<io::Error> for Response {
|
|
||||||
fn from(x: io::Error) -> Self {
|
|
||||||
Self::Error(Error::from(x))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Used to provide a default serde value of 1
///
/// Referenced by `DirRead::depth` via `#[serde(default = "one")]`; serde's
/// `default` attribute requires a function path, so a bare literal cannot
/// be used there.
const fn one() -> usize {
    1
}
|
|
@ -1,207 +0,0 @@
|
|||||||
use std::cmp::Ordering;
|
|
||||||
use std::collections::HashSet;
|
|
||||||
use std::hash::{Hash, Hasher};
|
|
||||||
use std::ops::{BitAnd, BitOr, BitXor};
|
|
||||||
use std::str::FromStr;
|
|
||||||
|
|
||||||
use derive_more::{From, Into, IntoIterator};
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use strum::{EnumMessage, IntoEnumIterator};
|
|
||||||
|
|
||||||
use super::CapabilityKind;
|
|
||||||
|
|
||||||
/// Set of supported capabilities for a server
///
/// A newtype over `HashSet<Capability>`; note that [`Capability`] hashes and
/// compares only by (case-insensitive) `kind`, so at most one entry per kind
/// can exist regardless of description. `#[serde(transparent)]` serializes
/// this as the bare set.
#[derive(Clone, Debug, From, Into, PartialEq, Eq, IntoIterator, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[serde(transparent)]
pub struct Capabilities(#[into_iterator(owned, ref)] HashSet<Capability>);
|
|
||||||
|
|
||||||
impl Capabilities {
|
|
||||||
/// Return set of capabilities encompassing all possible capabilities
|
|
||||||
pub fn all() -> Self {
|
|
||||||
Self(CapabilityKind::iter().map(Capability::from).collect())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Return empty set of capabilities
|
|
||||||
pub fn none() -> Self {
|
|
||||||
Self(HashSet::new())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns true if the capability with described kind is included
|
|
||||||
pub fn contains(&self, kind: impl AsRef<str>) -> bool {
|
|
||||||
let cap = Capability {
|
|
||||||
kind: kind.as_ref().to_string(),
|
|
||||||
description: String::new(),
|
|
||||||
};
|
|
||||||
self.0.contains(&cap)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Adds the specified capability to the set of capabilities
|
|
||||||
///
|
|
||||||
/// * If the set did not have this capability, returns `true`
|
|
||||||
/// * If the set did have this capability, returns `false`
|
|
||||||
pub fn insert(&mut self, cap: impl Into<Capability>) -> bool {
|
|
||||||
self.0.insert(cap.into())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Removes the capability with the described kind, returning the capability
|
|
||||||
pub fn take(&mut self, kind: impl AsRef<str>) -> Option<Capability> {
|
|
||||||
let cap = Capability {
|
|
||||||
kind: kind.as_ref().to_string(),
|
|
||||||
description: String::new(),
|
|
||||||
};
|
|
||||||
self.0.take(&cap)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Removes the capability with the described kind, returning true if it existed
|
|
||||||
pub fn remove(&mut self, kind: impl AsRef<str>) -> bool {
|
|
||||||
let cap = Capability {
|
|
||||||
kind: kind.as_ref().to_string(),
|
|
||||||
description: String::new(),
|
|
||||||
};
|
|
||||||
self.0.remove(&cap)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Converts into vec of capabilities sorted by kind
|
|
||||||
pub fn into_sorted_vec(self) -> Vec<Capability> {
|
|
||||||
let mut this = self.0.into_iter().collect::<Vec<_>>();
|
|
||||||
|
|
||||||
this.sort_unstable();
|
|
||||||
|
|
||||||
this
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(feature = "schemars")]
impl Capabilities {
    /// Produces the JSON schema describing [`Capabilities`] (schemars feature only)
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(Capabilities)
    }
}
|
|
||||||
|
|
||||||
impl BitAnd for &Capabilities {
|
|
||||||
type Output = Capabilities;
|
|
||||||
|
|
||||||
fn bitand(self, rhs: Self) -> Self::Output {
|
|
||||||
Capabilities(self.0.bitand(&rhs.0))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl BitOr for &Capabilities {
|
|
||||||
type Output = Capabilities;
|
|
||||||
|
|
||||||
fn bitor(self, rhs: Self) -> Self::Output {
|
|
||||||
Capabilities(self.0.bitor(&rhs.0))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl BitOr<Capability> for &Capabilities {
|
|
||||||
type Output = Capabilities;
|
|
||||||
|
|
||||||
fn bitor(self, rhs: Capability) -> Self::Output {
|
|
||||||
let mut other = Capabilities::none();
|
|
||||||
other.0.insert(rhs);
|
|
||||||
|
|
||||||
self.bitor(&other)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl BitXor for &Capabilities {
|
|
||||||
type Output = Capabilities;
|
|
||||||
|
|
||||||
fn bitxor(self, rhs: Self) -> Self::Output {
|
|
||||||
Capabilities(self.0.bitxor(&rhs.0))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl FromIterator<Capability> for Capabilities {
|
|
||||||
fn from_iter<I: IntoIterator<Item = Capability>>(iter: I) -> Self {
|
|
||||||
let mut this = Capabilities::none();
|
|
||||||
|
|
||||||
for capability in iter {
|
|
||||||
this.0.insert(capability);
|
|
||||||
}
|
|
||||||
|
|
||||||
this
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Capability tied to a server. A capability is equivalent based on its kind and not description.
///
/// See the `PartialEq`/`Hash`/`Ord` impls below: all three consider only a
/// case-insensitive (ASCII) view of `kind`, never `description`.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[serde(rename_all = "snake_case", deny_unknown_fields)]
pub struct Capability {
    /// Label describing the kind of capability
    pub kind: String,

    /// Information about the capability
    pub description: String,
}
|
|
||||||
|
|
||||||
impl Capability {
|
|
||||||
/// Will convert the [`Capability`]'s `kind` into a known [`CapabilityKind`] if possible,
|
|
||||||
/// returning None if the capability is unknown
|
|
||||||
pub fn to_capability_kind(&self) -> Option<CapabilityKind> {
|
|
||||||
CapabilityKind::from_str(&self.kind).ok()
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns true if the described capability is unknown
|
|
||||||
pub fn is_unknown(&self) -> bool {
|
|
||||||
self.to_capability_kind().is_none()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PartialEq for Capability {
    /// Equality considers only `kind`, compared case-insensitively (ASCII);
    /// `description` is deliberately ignored. Must stay consistent with the
    /// `Hash` and `Ord` impls, which use the same lowercased-kind view.
    fn eq(&self, other: &Self) -> bool {
        self.kind.eq_ignore_ascii_case(&other.kind)
    }
}
|
|
||||||
|
|
||||||
// Case-insensitive kind comparison is reflexive/symmetric/transitive, so the
// manual PartialEq above upholds full equivalence.
impl Eq for Capability {}
|
|
||||||
|
|
||||||
impl PartialOrd for Capability {
    // Delegates to the total ordering below to keep PartialOrd/Ord consistent.
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
|
|
||||||
|
|
||||||
impl Ord for Capability {
|
|
||||||
fn cmp(&self, other: &Self) -> Ordering {
|
|
||||||
self.kind
|
|
||||||
.to_ascii_lowercase()
|
|
||||||
.cmp(&other.kind.to_ascii_lowercase())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Hash for Capability {
    // Hashes only the ASCII-lowercased kind so hashing agrees with the
    // case-insensitive PartialEq above (required for HashSet correctness);
    // description never participates.
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.kind.to_ascii_lowercase().hash(state);
    }
}
|
|
||||||
|
|
||||||
impl From<CapabilityKind> for Capability {
    /// Creates a new capability using the kind's default message
    ///
    /// The kind's strum `message` (if any) becomes the description; kinds
    /// without a message get an empty description.
    fn from(kind: CapabilityKind) -> Self {
        let description = kind
            .get_message()
            .map(ToString::to_string)
            .unwrap_or_default();

        Self {
            kind: kind.to_string(),
            description,
        }
    }
}
|
|
||||||
|
|
||||||
#[cfg(feature = "schemars")]
impl Capability {
    /// Produces the JSON schema describing [`Capability`] (schemars feature only)
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(Capability)
    }
}
|
|
||||||
|
|
||||||
#[cfg(feature = "schemars")]
impl CapabilityKind {
    /// Produces the JSON schema describing [`CapabilityKind`] (schemars feature only)
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(CapabilityKind)
    }
}
|
|
@ -1,516 +0,0 @@
|
|||||||
use std::collections::HashSet;
|
|
||||||
use std::fmt;
|
|
||||||
use std::hash::{Hash, Hasher};
|
|
||||||
use std::iter::FromIterator;
|
|
||||||
use std::ops::{BitOr, Sub};
|
|
||||||
use std::path::PathBuf;
|
|
||||||
use std::str::FromStr;
|
|
||||||
|
|
||||||
use derive_more::{Deref, DerefMut, IntoIterator};
|
|
||||||
use notify::event::Event as NotifyEvent;
|
|
||||||
use notify::EventKind as NotifyEventKind;
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use strum::{EnumString, EnumVariantNames, VariantNames};
|
|
||||||
|
|
||||||
/// Change to one or more paths on the filesystem
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[serde(rename_all = "snake_case", deny_unknown_fields)]
pub struct Change {
    /// Label describing the kind of change
    pub kind: ChangeKind,

    /// Paths that were changed
    ///
    /// For `ChangeKind::RenameBoth`, order is (from, to) — see that variant.
    pub paths: Vec<PathBuf>,
}
|
|
||||||
|
|
||||||
#[cfg(feature = "schemars")]
impl Change {
    /// Produces the JSON schema describing [`Change`] (schemars feature only)
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(Change)
    }
}
|
|
||||||
|
|
||||||
impl From<NotifyEvent> for Change {
|
|
||||||
fn from(x: NotifyEvent) -> Self {
|
|
||||||
Self {
|
|
||||||
kind: x.kind.into(),
|
|
||||||
paths: x.paths,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Kind of filesystem change reported for a watched path
///
/// Serialized in snake_case by both serde and strum. Variant order backs the
/// derived `Ord`/`PartialOrd`, so do not reorder.
#[derive(
    Copy,
    Clone,
    Debug,
    strum::Display,
    EnumString,
    EnumVariantNames,
    Hash,
    PartialEq,
    Eq,
    PartialOrd,
    Ord,
    Serialize,
    Deserialize,
)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[serde(rename_all = "snake_case", deny_unknown_fields)]
#[strum(serialize_all = "snake_case")]
pub enum ChangeKind {
    /// Something about a file or directory was accessed, but
    /// no specific details were known
    Access,

    /// A file was closed for executing
    AccessCloseExecute,

    /// A file was closed for reading
    AccessCloseRead,

    /// A file was closed for writing
    AccessCloseWrite,

    /// A file was opened for executing
    AccessOpenExecute,

    /// A file was opened for reading
    AccessOpenRead,

    /// A file was opened for writing
    AccessOpenWrite,

    /// A file or directory was read
    AccessRead,

    /// The access time of a file or directory was changed
    AccessTime,

    /// A file, directory, or something else was created
    Create,

    /// The content of a file or directory changed
    Content,

    /// The data of a file or directory was modified, but
    /// no specific details were known
    Data,

    /// The metadata of a file or directory was modified, but
    /// no specific details were known
    Metadata,

    /// Something about a file or directory was modified, but
    /// no specific details were known
    Modify,

    /// A file, directory, or something else was removed
    Remove,

    /// A file or directory was renamed, but no specific details were known
    Rename,

    /// A file or directory was renamed, and the provided paths
    /// are the source and target in that order (from, to)
    RenameBoth,

    /// A file or directory was renamed, and the provided path
    /// is the origin of the rename (before being renamed)
    RenameFrom,

    /// A file or directory was renamed, and the provided path
    /// is the result of the rename
    RenameTo,

    /// A file's size changed
    Size,

    /// The ownership of a file or directory was changed
    Ownership,

    /// The permissions of a file or directory was changed
    Permissions,

    /// The write or modify time of a file or directory was changed
    WriteTime,

    /// Catchall in case we have no insight as to the type of change
    Unknown,
}
|
|
||||||
|
|
||||||
impl ChangeKind {
|
|
||||||
/// Returns a list of all variants as str names
|
|
||||||
pub const fn variants() -> &'static [&'static str] {
|
|
||||||
Self::VARIANTS
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns a list of all variants as a vec
|
|
||||||
pub fn all() -> Vec<ChangeKind> {
|
|
||||||
ChangeKindSet::all().into_sorted_vec()
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns true if the change is a kind of access
|
|
||||||
pub fn is_access_kind(&self) -> bool {
|
|
||||||
self.is_open_access_kind()
|
|
||||||
|| self.is_close_access_kind()
|
|
||||||
|| matches!(self, Self::Access | Self::AccessRead)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns true if the change is a kind of open access
|
|
||||||
pub fn is_open_access_kind(&self) -> bool {
|
|
||||||
matches!(
|
|
||||||
self,
|
|
||||||
Self::AccessOpenExecute | Self::AccessOpenRead | Self::AccessOpenWrite
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns true if the change is a kind of close access
|
|
||||||
pub fn is_close_access_kind(&self) -> bool {
|
|
||||||
matches!(
|
|
||||||
self,
|
|
||||||
Self::AccessCloseExecute | Self::AccessCloseRead | Self::AccessCloseWrite
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns true if the change is a kind of creation
|
|
||||||
pub fn is_create_kind(&self) -> bool {
|
|
||||||
matches!(self, Self::Create)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns true if the change is a kind of modification
|
|
||||||
pub fn is_modify_kind(&self) -> bool {
|
|
||||||
self.is_data_modify_kind() || self.is_metadata_modify_kind() || matches!(self, Self::Modify)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns true if the change is a kind of data modification
|
|
||||||
pub fn is_data_modify_kind(&self) -> bool {
|
|
||||||
matches!(self, Self::Content | Self::Data | Self::Size)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns true if the change is a kind of metadata modification
|
|
||||||
pub fn is_metadata_modify_kind(&self) -> bool {
|
|
||||||
matches!(
|
|
||||||
self,
|
|
||||||
Self::AccessTime
|
|
||||||
| Self::Metadata
|
|
||||||
| Self::Ownership
|
|
||||||
| Self::Permissions
|
|
||||||
| Self::WriteTime
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns true if the change is a kind of rename
|
|
||||||
pub fn is_rename_kind(&self) -> bool {
|
|
||||||
matches!(
|
|
||||||
self,
|
|
||||||
Self::Rename | Self::RenameBoth | Self::RenameFrom | Self::RenameTo
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns true if the change is a kind of removal
|
|
||||||
pub fn is_remove_kind(&self) -> bool {
|
|
||||||
matches!(self, Self::Remove)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns true if the change kind is unknown
|
|
||||||
pub fn is_unknown_kind(&self) -> bool {
|
|
||||||
matches!(self, Self::Unknown)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(feature = "schemars")]
|
|
||||||
impl ChangeKind {
|
|
||||||
pub fn root_schema() -> schemars::schema::RootSchema {
|
|
||||||
schemars::schema_for!(ChangeKind)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl BitOr for ChangeKind {
|
|
||||||
type Output = ChangeKindSet;
|
|
||||||
|
|
||||||
fn bitor(self, rhs: Self) -> Self::Output {
|
|
||||||
let mut set = ChangeKindSet::empty();
|
|
||||||
set.insert(self);
|
|
||||||
set.insert(rhs);
|
|
||||||
set
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<NotifyEventKind> for ChangeKind {
    /// Maps a `notify` event kind onto the closest [`ChangeKind`] variant.
    ///
    /// Specific sub-kinds map to their dedicated variants; wildcard arms
    /// collapse anything unrecognized within a category to that category's
    /// generic variant (e.g. any other `Access(..)` becomes `Access`).
    fn from(x: NotifyEventKind) -> Self {
        use notify::event::{
            AccessKind, AccessMode, DataChange, MetadataKind, ModifyKind, RenameMode,
        };
        match x {
            // File/directory access events
            NotifyEventKind::Access(AccessKind::Read) => Self::AccessRead,
            NotifyEventKind::Access(AccessKind::Open(AccessMode::Execute)) => {
                Self::AccessOpenExecute
            }
            NotifyEventKind::Access(AccessKind::Open(AccessMode::Read)) => Self::AccessOpenRead,
            NotifyEventKind::Access(AccessKind::Open(AccessMode::Write)) => Self::AccessOpenWrite,
            NotifyEventKind::Access(AccessKind::Close(AccessMode::Execute)) => {
                Self::AccessCloseExecute
            }
            NotifyEventKind::Access(AccessKind::Close(AccessMode::Read)) => Self::AccessCloseRead,
            NotifyEventKind::Access(AccessKind::Close(AccessMode::Write)) => Self::AccessCloseWrite,
            NotifyEventKind::Access(_) => Self::Access,

            // File/directory creation events
            NotifyEventKind::Create(_) => Self::Create,

            // Rename-oriented events
            NotifyEventKind::Modify(ModifyKind::Name(RenameMode::Both)) => Self::RenameBoth,
            NotifyEventKind::Modify(ModifyKind::Name(RenameMode::From)) => Self::RenameFrom,
            NotifyEventKind::Modify(ModifyKind::Name(RenameMode::To)) => Self::RenameTo,
            NotifyEventKind::Modify(ModifyKind::Name(_)) => Self::Rename,

            // Data-modification events
            NotifyEventKind::Modify(ModifyKind::Data(DataChange::Content)) => Self::Content,
            NotifyEventKind::Modify(ModifyKind::Data(DataChange::Size)) => Self::Size,
            NotifyEventKind::Modify(ModifyKind::Data(_)) => Self::Data,

            // Metadata-modification events
            NotifyEventKind::Modify(ModifyKind::Metadata(MetadataKind::AccessTime)) => {
                Self::AccessTime
            }
            NotifyEventKind::Modify(ModifyKind::Metadata(MetadataKind::WriteTime)) => {
                Self::WriteTime
            }
            NotifyEventKind::Modify(ModifyKind::Metadata(MetadataKind::Permissions)) => {
                Self::Permissions
            }
            NotifyEventKind::Modify(ModifyKind::Metadata(MetadataKind::Ownership)) => {
                Self::Ownership
            }
            NotifyEventKind::Modify(ModifyKind::Metadata(_)) => Self::Metadata,

            // General modification events
            NotifyEventKind::Modify(_) => Self::Modify,

            // File/directory removal events
            NotifyEventKind::Remove(_) => Self::Remove,

            // Catch-all for other events
            NotifyEventKind::Any | NotifyEventKind::Other => Self::Unknown,
        }
    }
}
|
|
||||||
|
|
||||||
/// Represents a distinct set of different change kinds.
///
/// Backed by a `HashSet`, so membership and insertion are O(1); ordering is
/// only imposed on demand (see `into_sorted_vec` and the `Display` impl).
#[derive(Clone, Debug, Deref, DerefMut, IntoIterator, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct ChangeKindSet(HashSet<ChangeKind>);
|
|
||||||
|
|
||||||
impl ChangeKindSet {
|
|
||||||
/// Produces an empty set of [`ChangeKind`]
|
|
||||||
pub fn empty() -> Self {
|
|
||||||
Self(HashSet::new())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Produces a set of all [`ChangeKind`]
|
|
||||||
pub fn all() -> Self {
|
|
||||||
vec![
|
|
||||||
ChangeKind::Access,
|
|
||||||
ChangeKind::AccessCloseExecute,
|
|
||||||
ChangeKind::AccessCloseRead,
|
|
||||||
ChangeKind::AccessCloseWrite,
|
|
||||||
ChangeKind::AccessOpenExecute,
|
|
||||||
ChangeKind::AccessOpenRead,
|
|
||||||
ChangeKind::AccessOpenWrite,
|
|
||||||
ChangeKind::AccessRead,
|
|
||||||
ChangeKind::AccessTime,
|
|
||||||
ChangeKind::Create,
|
|
||||||
ChangeKind::Content,
|
|
||||||
ChangeKind::Data,
|
|
||||||
ChangeKind::Metadata,
|
|
||||||
ChangeKind::Modify,
|
|
||||||
ChangeKind::Remove,
|
|
||||||
ChangeKind::Rename,
|
|
||||||
ChangeKind::RenameBoth,
|
|
||||||
ChangeKind::RenameFrom,
|
|
||||||
ChangeKind::RenameTo,
|
|
||||||
ChangeKind::Size,
|
|
||||||
ChangeKind::Ownership,
|
|
||||||
ChangeKind::Permissions,
|
|
||||||
ChangeKind::WriteTime,
|
|
||||||
ChangeKind::Unknown,
|
|
||||||
]
|
|
||||||
.into_iter()
|
|
||||||
.collect()
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Produces a changeset containing all of the access kinds
|
|
||||||
pub fn access_set() -> Self {
|
|
||||||
Self::access_open_set()
|
|
||||||
| Self::access_close_set()
|
|
||||||
| ChangeKind::AccessRead
|
|
||||||
| ChangeKind::Access
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Produces a changeset containing all of the open access kinds
|
|
||||||
pub fn access_open_set() -> Self {
|
|
||||||
ChangeKind::AccessOpenExecute | ChangeKind::AccessOpenRead | ChangeKind::AccessOpenWrite
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Produces a changeset containing all of the close access kinds
|
|
||||||
pub fn access_close_set() -> Self {
|
|
||||||
ChangeKind::AccessCloseExecute | ChangeKind::AccessCloseRead | ChangeKind::AccessCloseWrite
|
|
||||||
}
|
|
||||||
|
|
||||||
// Produces a changeset containing all of the modification kinds
|
|
||||||
pub fn modify_set() -> Self {
|
|
||||||
Self::modify_data_set() | Self::modify_metadata_set() | ChangeKind::Modify
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Produces a changeset containing all of the data modification kinds
|
|
||||||
pub fn modify_data_set() -> Self {
|
|
||||||
ChangeKind::Content | ChangeKind::Data | ChangeKind::Size
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Produces a changeset containing all of the metadata modification kinds
|
|
||||||
pub fn modify_metadata_set() -> Self {
|
|
||||||
ChangeKind::AccessTime
|
|
||||||
| ChangeKind::Metadata
|
|
||||||
| ChangeKind::Ownership
|
|
||||||
| ChangeKind::Permissions
|
|
||||||
| ChangeKind::WriteTime
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Produces a changeset containing all of the rename kinds
|
|
||||||
pub fn rename_set() -> Self {
|
|
||||||
ChangeKind::Rename | ChangeKind::RenameBoth | ChangeKind::RenameFrom | ChangeKind::RenameTo
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Consumes set and returns a sorted vec of the kinds of changes
|
|
||||||
pub fn into_sorted_vec(self) -> Vec<ChangeKind> {
|
|
||||||
let mut v = self.0.into_iter().collect::<Vec<_>>();
|
|
||||||
v.sort();
|
|
||||||
v
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(feature = "schemars")]
|
|
||||||
impl ChangeKindSet {
|
|
||||||
pub fn root_schema() -> schemars::schema::RootSchema {
|
|
||||||
schemars::schema_for!(ChangeKindSet)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Display for ChangeKindSet {
|
|
||||||
/// Outputs a comma-separated series of [`ChangeKind`] as string that are sorted
|
|
||||||
/// such that this will always be consistent output
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
||||||
let mut kinds = self
|
|
||||||
.0
|
|
||||||
.iter()
|
|
||||||
.map(ToString::to_string)
|
|
||||||
.collect::<Vec<String>>();
|
|
||||||
kinds.sort_unstable();
|
|
||||||
write!(f, "{}", kinds.join(","))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PartialEq for ChangeKindSet {
|
|
||||||
fn eq(&self, other: &Self) -> bool {
|
|
||||||
self.to_string() == other.to_string()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Eq for ChangeKindSet {}
|
|
||||||
|
|
||||||
impl Hash for ChangeKindSet {
|
|
||||||
/// Hashes based on the output of [`fmt::Display`]
|
|
||||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
|
||||||
self.to_string().hash(state);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl BitOr<ChangeKindSet> for ChangeKindSet {
|
|
||||||
type Output = Self;
|
|
||||||
|
|
||||||
fn bitor(mut self, rhs: ChangeKindSet) -> Self::Output {
|
|
||||||
self.extend(rhs.0);
|
|
||||||
self
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl BitOr<ChangeKind> for ChangeKindSet {
|
|
||||||
type Output = Self;
|
|
||||||
|
|
||||||
fn bitor(mut self, rhs: ChangeKind) -> Self::Output {
|
|
||||||
self.0.insert(rhs);
|
|
||||||
self
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl BitOr<ChangeKindSet> for ChangeKind {
|
|
||||||
type Output = ChangeKindSet;
|
|
||||||
|
|
||||||
fn bitor(self, rhs: ChangeKindSet) -> Self::Output {
|
|
||||||
rhs | self
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Sub<ChangeKindSet> for ChangeKindSet {
|
|
||||||
type Output = Self;
|
|
||||||
|
|
||||||
fn sub(self, other: Self) -> Self::Output {
|
|
||||||
ChangeKindSet(&self.0 - &other.0)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Sub<&'_ ChangeKindSet> for &ChangeKindSet {
|
|
||||||
type Output = ChangeKindSet;
|
|
||||||
|
|
||||||
fn sub(self, other: &ChangeKindSet) -> Self::Output {
|
|
||||||
ChangeKindSet(&self.0 - &other.0)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl FromStr for ChangeKindSet {
|
|
||||||
type Err = strum::ParseError;
|
|
||||||
|
|
||||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
|
||||||
let mut change_set = HashSet::new();
|
|
||||||
|
|
||||||
for word in s.split(',') {
|
|
||||||
change_set.insert(ChangeKind::from_str(word.trim())?);
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(ChangeKindSet(change_set))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl FromIterator<ChangeKind> for ChangeKindSet {
|
|
||||||
fn from_iter<I: IntoIterator<Item = ChangeKind>>(iter: I) -> Self {
|
|
||||||
let mut change_set = HashSet::new();
|
|
||||||
|
|
||||||
for i in iter {
|
|
||||||
change_set.insert(i);
|
|
||||||
}
|
|
||||||
|
|
||||||
ChangeKindSet(change_set)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<ChangeKind> for ChangeKindSet {
|
|
||||||
fn from(change_kind: ChangeKind) -> Self {
|
|
||||||
let mut set = Self::empty();
|
|
||||||
set.insert(change_kind);
|
|
||||||
set
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<Vec<ChangeKind>> for ChangeKindSet {
|
|
||||||
fn from(changes: Vec<ChangeKind>) -> Self {
|
|
||||||
changes.into_iter().collect()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Default for ChangeKindSet {
|
|
||||||
fn default() -> Self {
|
|
||||||
Self::empty()
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,53 +0,0 @@
|
|||||||
use std::ops::{Deref, DerefMut};
|
|
||||||
|
|
||||||
use derive_more::{Display, From, Into};
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
|
|
||||||
/// Represents some command with arguments to execute.
///
/// Stored as a single space-delimited string; the first whitespace-separated
/// token is the program and the remainder its arguments (see `program` /
/// `arguments` in the impl below).
#[derive(Clone, Debug, Display, From, Into, Hash, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct Cmd(String);
|
|
||||||
|
|
||||||
impl Cmd {
|
|
||||||
/// Creates a new command from the given `cmd`
|
|
||||||
pub fn new(cmd: impl Into<String>) -> Self {
|
|
||||||
Self(cmd.into())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns reference to the program portion of the command
|
|
||||||
pub fn program(&self) -> &str {
|
|
||||||
match self.0.split_once(' ') {
|
|
||||||
Some((program, _)) => program.trim(),
|
|
||||||
None => self.0.trim(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns reference to the arguments portion of the command
|
|
||||||
pub fn arguments(&self) -> &str {
|
|
||||||
match self.0.split_once(' ') {
|
|
||||||
Some((_, arguments)) => arguments.trim(),
|
|
||||||
None => "",
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(feature = "schemars")]
|
|
||||||
impl Cmd {
|
|
||||||
pub fn root_schema() -> schemars::schema::RootSchema {
|
|
||||||
schemars::schema_for!(Cmd)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Deref for Cmd {
|
|
||||||
type Target = String;
|
|
||||||
|
|
||||||
fn deref(&self) -> &Self::Target {
|
|
||||||
&self.0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl DerefMut for Cmd {
|
|
||||||
fn deref_mut(&mut self) -> &mut Self::Target {
|
|
||||||
&mut self.0
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,59 +0,0 @@
|
|||||||
use std::fs::FileType as StdFileType;
|
|
||||||
use std::path::PathBuf;
|
|
||||||
|
|
||||||
use derive_more::IsVariant;
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use strum::AsRefStr;
|
|
||||||
|
|
||||||
/// Represents information about a single entry within a directory.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[serde(rename_all = "snake_case", deny_unknown_fields)]
pub struct DirEntry {
    /// Represents the full path to the entry
    pub path: PathBuf,

    /// Represents the type of the entry as a file/dir/symlink
    pub file_type: FileType,

    /// Depth at which this entry was created relative to the root (0 being immediately within
    /// root)
    pub depth: usize,
}
|
|
||||||
|
|
||||||
#[cfg(feature = "schemars")]
impl DirEntry {
    /// Produces the JSON schema describing a [`DirEntry`].
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(DirEntry)
    }
}
|
|
||||||
|
|
||||||
/// Represents the type associated with a dir entry.
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, AsRefStr, IsVariant, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[serde(rename_all = "snake_case", deny_unknown_fields)]
#[strum(serialize_all = "snake_case")]
pub enum FileType {
    /// Entry is a directory
    Dir,
    /// Entry is a regular file
    File,
    /// Entry is a symbolic link
    Symlink,
}
|
|
||||||
|
|
||||||
impl From<StdFileType> for FileType {
|
|
||||||
fn from(ft: StdFileType) -> Self {
|
|
||||||
if ft.is_dir() {
|
|
||||||
Self::Dir
|
|
||||||
} else if ft.is_symlink() {
|
|
||||||
Self::Symlink
|
|
||||||
} else {
|
|
||||||
Self::File
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(feature = "schemars")]
|
|
||||||
impl FileType {
|
|
||||||
pub fn root_schema() -> schemars::schema::RootSchema {
|
|
||||||
schemars::schema_for!(FileType)
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,404 +0,0 @@
|
|||||||
use std::io;
|
|
||||||
use std::path::{Path, PathBuf};
|
|
||||||
use std::time::SystemTime;
|
|
||||||
|
|
||||||
use bitflags::bitflags;
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
|
|
||||||
use super::{deserialize_u128_option, serialize_u128_option, FileType};
|
|
||||||
|
|
||||||
/// Represents metadata about some path on a remote machine.
///
/// Timestamps are milliseconds since the unix epoch; they are serialized via
/// custom u128 helpers because msgpack/json lack native u128 support.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct Metadata {
    /// Canonicalized path to the file or directory, resolving symlinks, only included
    /// if flagged during the request
    pub canonicalized_path: Option<PathBuf>,

    /// Represents the type of the entry as a file/dir/symlink
    pub file_type: FileType,

    /// Size of the file/directory/symlink in bytes
    pub len: u64,

    /// Whether or not the file/directory/symlink is marked as unwriteable
    pub readonly: bool,

    /// Represents the last time (in milliseconds) when the file/directory/symlink was accessed;
    /// can be optional as certain systems don't support this
    #[serde(serialize_with = "serialize_u128_option")]
    #[serde(deserialize_with = "deserialize_u128_option")]
    pub accessed: Option<u128>,

    /// Represents when (in milliseconds) the file/directory/symlink was created;
    /// can be optional as certain systems don't support this
    #[serde(serialize_with = "serialize_u128_option")]
    #[serde(deserialize_with = "deserialize_u128_option")]
    pub created: Option<u128>,

    /// Represents the last time (in milliseconds) when the file/directory/symlink was modified;
    /// can be optional as certain systems don't support this
    #[serde(serialize_with = "serialize_u128_option")]
    #[serde(deserialize_with = "deserialize_u128_option")]
    pub modified: Option<u128>,

    /// Represents metadata that is specific to a unix remote machine
    pub unix: Option<UnixMetadata>,

    /// Represents metadata that is specific to a windows remote machine
    pub windows: Option<WindowsMetadata>,
}
|
|
||||||
|
|
||||||
impl Metadata {
    /// Reads metadata for `path` from the local filesystem.
    ///
    /// * `canonicalize` — when true, also resolves symlinks to produce
    ///   `canonicalized_path` (an extra filesystem call).
    /// * `resolve_file_type` — when true and `path` is a symlink, reports the
    ///   file type of the link *target* instead of the link itself.
    ///
    /// Uses `symlink_metadata` first so symlinks themselves are observable,
    /// and only follows the link (via `metadata`) when asked to resolve it.
    /// Platform-specific data is populated via `cfg`: exactly one of
    /// `unix`/`windows` is `Some` depending on the compile target.
    ///
    /// # Errors
    /// Propagates any I/O error from the underlying filesystem calls.
    pub async fn read(
        path: impl AsRef<Path>,
        canonicalize: bool,
        resolve_file_type: bool,
    ) -> io::Result<Self> {
        let metadata = tokio::fs::symlink_metadata(path.as_ref()).await?;
        let canonicalized_path = if canonicalize {
            Some(tokio::fs::canonicalize(path.as_ref()).await?)
        } else {
            None
        };

        // If asking for resolved file type and current type is symlink, then we want to refresh
        // our metadata to get the filetype for the resolved link
        let file_type = if resolve_file_type && metadata.file_type().is_symlink() {
            tokio::fs::metadata(path).await?.file_type()
        } else {
            metadata.file_type()
        };

        Ok(Self {
            canonicalized_path,
            // Timestamps are best-effort: platforms that don't support a given
            // timestamp (or report pre-epoch times) yield None
            accessed: metadata
                .accessed()
                .ok()
                .and_then(|t| t.duration_since(SystemTime::UNIX_EPOCH).ok())
                .map(|d| d.as_millis()),
            created: metadata
                .created()
                .ok()
                .and_then(|t| t.duration_since(SystemTime::UNIX_EPOCH).ok())
                .map(|d| d.as_millis()),
            modified: metadata
                .modified()
                .ok()
                .and_then(|t| t.duration_since(SystemTime::UNIX_EPOCH).ok())
                .map(|d| d.as_millis()),
            len: metadata.len(),
            readonly: metadata.permissions().readonly(),
            file_type: if file_type.is_dir() {
                FileType::Dir
            } else if file_type.is_file() {
                FileType::File
            } else {
                FileType::Symlink
            },

            #[cfg(unix)]
            unix: Some({
                use std::os::unix::prelude::*;
                let mode = metadata.mode();
                crate::protocol::UnixMetadata::from(mode)
            }),
            #[cfg(not(unix))]
            unix: None,

            #[cfg(windows)]
            windows: Some({
                use std::os::windows::prelude::*;
                let attributes = metadata.file_attributes();
                crate::protocol::WindowsMetadata::from(attributes)
            }),
            #[cfg(not(windows))]
            windows: None,
        })
    }
}
|
|
||||||
|
|
||||||
#[cfg(feature = "schemars")]
impl Metadata {
    /// Produces the JSON schema describing a [`Metadata`].
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(Metadata)
    }
}
|
|
||||||
|
|
||||||
/// Represents unix-specific metadata about some path on a remote machine.
///
/// This is a boolean expansion of the nine standard unix permission mode
/// bits; see the `From<u32>`/`Into<u32>` conversions below.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct UnixMetadata {
    /// Represents whether or not owner can read from the file
    pub owner_read: bool,

    /// Represents whether or not owner can write to the file
    pub owner_write: bool,

    /// Represents whether or not owner can execute the file
    pub owner_exec: bool,

    /// Represents whether or not associated group can read from the file
    pub group_read: bool,

    /// Represents whether or not associated group can write to the file
    pub group_write: bool,

    /// Represents whether or not associated group can execute the file
    pub group_exec: bool,

    /// Represents whether or not other can read from the file
    pub other_read: bool,

    /// Represents whether or not other can write to the file
    pub other_write: bool,

    /// Represents whether or not other can execute the file
    pub other_exec: bool,
}
|
|
||||||
|
|
||||||
#[cfg(feature = "schemars")]
impl UnixMetadata {
    /// Produces the JSON schema describing a [`UnixMetadata`].
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(UnixMetadata)
    }
}
|
|
||||||
|
|
||||||
impl From<u32> for UnixMetadata {
|
|
||||||
/// Create from a unix mode bitset
|
|
||||||
fn from(mode: u32) -> Self {
|
|
||||||
let flags = UnixFilePermissionFlags::from_bits_truncate(mode);
|
|
||||||
Self {
|
|
||||||
owner_read: flags.contains(UnixFilePermissionFlags::OWNER_READ),
|
|
||||||
owner_write: flags.contains(UnixFilePermissionFlags::OWNER_WRITE),
|
|
||||||
owner_exec: flags.contains(UnixFilePermissionFlags::OWNER_EXEC),
|
|
||||||
group_read: flags.contains(UnixFilePermissionFlags::GROUP_READ),
|
|
||||||
group_write: flags.contains(UnixFilePermissionFlags::GROUP_WRITE),
|
|
||||||
group_exec: flags.contains(UnixFilePermissionFlags::GROUP_EXEC),
|
|
||||||
other_read: flags.contains(UnixFilePermissionFlags::OTHER_READ),
|
|
||||||
other_write: flags.contains(UnixFilePermissionFlags::OTHER_WRITE),
|
|
||||||
other_exec: flags.contains(UnixFilePermissionFlags::OTHER_EXEC),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<UnixMetadata> for u32 {
|
|
||||||
/// Convert to a unix mode bitset
|
|
||||||
fn from(metadata: UnixMetadata) -> Self {
|
|
||||||
let mut flags = UnixFilePermissionFlags::empty();
|
|
||||||
|
|
||||||
if metadata.owner_read {
|
|
||||||
flags.insert(UnixFilePermissionFlags::OWNER_READ);
|
|
||||||
}
|
|
||||||
if metadata.owner_write {
|
|
||||||
flags.insert(UnixFilePermissionFlags::OWNER_WRITE);
|
|
||||||
}
|
|
||||||
if metadata.owner_exec {
|
|
||||||
flags.insert(UnixFilePermissionFlags::OWNER_EXEC);
|
|
||||||
}
|
|
||||||
|
|
||||||
if metadata.group_read {
|
|
||||||
flags.insert(UnixFilePermissionFlags::GROUP_READ);
|
|
||||||
}
|
|
||||||
if metadata.group_write {
|
|
||||||
flags.insert(UnixFilePermissionFlags::GROUP_WRITE);
|
|
||||||
}
|
|
||||||
if metadata.group_exec {
|
|
||||||
flags.insert(UnixFilePermissionFlags::GROUP_EXEC);
|
|
||||||
}
|
|
||||||
|
|
||||||
if metadata.other_read {
|
|
||||||
flags.insert(UnixFilePermissionFlags::OTHER_READ);
|
|
||||||
}
|
|
||||||
if metadata.other_write {
|
|
||||||
flags.insert(UnixFilePermissionFlags::OTHER_WRITE);
|
|
||||||
}
|
|
||||||
if metadata.other_exec {
|
|
||||||
flags.insert(UnixFilePermissionFlags::OTHER_EXEC);
|
|
||||||
}
|
|
||||||
|
|
||||||
flags.bits()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl UnixMetadata {
|
|
||||||
pub fn is_readonly(self) -> bool {
|
|
||||||
!(self.owner_read || self.group_read || self.other_read)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
bitflags! {
    /// Standard unix permission mode bits (octal), grouped as
    /// owner/group/other x read/write/execute.
    struct UnixFilePermissionFlags: u32 {
        const OWNER_READ = 0o400;
        const OWNER_WRITE = 0o200;
        const OWNER_EXEC = 0o100;
        const GROUP_READ = 0o40;
        const GROUP_WRITE = 0o20;
        const GROUP_EXEC = 0o10;
        const OTHER_READ = 0o4;
        const OTHER_WRITE = 0o2;
        const OTHER_EXEC = 0o1;
    }
}
|
|
||||||
|
|
||||||
/// Represents windows-specific metadata about some path on a remote machine.
///
/// This is a boolean expansion of the windows file attribute bitset; see the
/// `From<u32>` conversion below.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct WindowsMetadata {
    /// Represents whether or not a file or directory is an archive
    pub archive: bool,

    /// Represents whether or not a file or directory is compressed
    pub compressed: bool,

    /// Represents whether or not the file or directory is encrypted
    pub encrypted: bool,

    /// Represents whether or not a file or directory is hidden
    pub hidden: bool,

    /// Represents whether or not a directory or user data stream is configured with integrity
    pub integrity_stream: bool,

    /// Represents whether or not a file does not have other attributes set
    pub normal: bool,

    /// Represents whether or not a file or directory is not to be indexed by content indexing
    /// service
    pub not_content_indexed: bool,

    /// Represents whether or not a user data stream is not to be read by the background data
    /// integrity scanner
    pub no_scrub_data: bool,

    /// Represents whether or not the data of a file is not available immediately
    pub offline: bool,

    /// Represents whether or not a file or directory is not fully present locally
    pub recall_on_data_access: bool,

    /// Represents whether or not a file or directory has no physical representation on the local
    /// system (is virtual)
    pub recall_on_open: bool,

    /// Represents whether or not a file or directory has an associated reparse point, or a file is
    /// a symbolic link
    pub reparse_point: bool,

    /// Represents whether or not a file is a sparse file
    pub sparse_file: bool,

    /// Represents whether or not a file or directory is used partially or exclusively by the
    /// operating system
    pub system: bool,

    /// Represents whether or not a file is being used for temporary storage
    pub temporary: bool,
}
|
|
||||||
|
|
||||||
#[cfg(feature = "schemars")]
impl WindowsMetadata {
    /// Produces the JSON schema describing a [`WindowsMetadata`].
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(WindowsMetadata)
    }
}
|
|
||||||
|
|
||||||
impl From<u32> for WindowsMetadata {
|
|
||||||
/// Create from a windows file attribute bitset
|
|
||||||
fn from(file_attributes: u32) -> Self {
|
|
||||||
let flags = WindowsFileAttributeFlags::from_bits_truncate(file_attributes);
|
|
||||||
Self {
|
|
||||||
archive: flags.contains(WindowsFileAttributeFlags::ARCHIVE),
|
|
||||||
compressed: flags.contains(WindowsFileAttributeFlags::COMPRESSED),
|
|
||||||
encrypted: flags.contains(WindowsFileAttributeFlags::ENCRYPTED),
|
|
||||||
hidden: flags.contains(WindowsFileAttributeFlags::HIDDEN),
|
|
||||||
integrity_stream: flags.contains(WindowsFileAttributeFlags::INTEGRITY_SYSTEM),
|
|
||||||
normal: flags.contains(WindowsFileAttributeFlags::NORMAL),
|
|
||||||
not_content_indexed: flags.contains(WindowsFileAttributeFlags::NOT_CONTENT_INDEXED),
|
|
||||||
no_scrub_data: flags.contains(WindowsFileAttributeFlags::NO_SCRUB_DATA),
|
|
||||||
offline: flags.contains(WindowsFileAttributeFlags::OFFLINE),
|
|
||||||
recall_on_data_access: flags.contains(WindowsFileAttributeFlags::RECALL_ON_DATA_ACCESS),
|
|
||||||
recall_on_open: flags.contains(WindowsFileAttributeFlags::RECALL_ON_OPEN),
|
|
||||||
reparse_point: flags.contains(WindowsFileAttributeFlags::REPARSE_POINT),
|
|
||||||
sparse_file: flags.contains(WindowsFileAttributeFlags::SPARSE_FILE),
|
|
||||||
system: flags.contains(WindowsFileAttributeFlags::SYSTEM),
|
|
||||||
temporary: flags.contains(WindowsFileAttributeFlags::TEMPORARY),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<WindowsMetadata> for u32 {
|
|
||||||
/// Convert to a windows file attribute bitset
|
|
||||||
fn from(metadata: WindowsMetadata) -> Self {
|
|
||||||
let mut flags = WindowsFileAttributeFlags::empty();
|
|
||||||
|
|
||||||
if metadata.archive {
|
|
||||||
flags.insert(WindowsFileAttributeFlags::ARCHIVE);
|
|
||||||
}
|
|
||||||
if metadata.compressed {
|
|
||||||
flags.insert(WindowsFileAttributeFlags::COMPRESSED);
|
|
||||||
}
|
|
||||||
if metadata.encrypted {
|
|
||||||
flags.insert(WindowsFileAttributeFlags::ENCRYPTED);
|
|
||||||
}
|
|
||||||
if metadata.hidden {
|
|
||||||
flags.insert(WindowsFileAttributeFlags::HIDDEN);
|
|
||||||
}
|
|
||||||
if metadata.integrity_stream {
|
|
||||||
flags.insert(WindowsFileAttributeFlags::INTEGRITY_SYSTEM);
|
|
||||||
}
|
|
||||||
if metadata.normal {
|
|
||||||
flags.insert(WindowsFileAttributeFlags::NORMAL);
|
|
||||||
}
|
|
||||||
if metadata.not_content_indexed {
|
|
||||||
flags.insert(WindowsFileAttributeFlags::NOT_CONTENT_INDEXED);
|
|
||||||
}
|
|
||||||
if metadata.no_scrub_data {
|
|
||||||
flags.insert(WindowsFileAttributeFlags::NO_SCRUB_DATA);
|
|
||||||
}
|
|
||||||
if metadata.offline {
|
|
||||||
flags.insert(WindowsFileAttributeFlags::OFFLINE);
|
|
||||||
}
|
|
||||||
if metadata.recall_on_data_access {
|
|
||||||
flags.insert(WindowsFileAttributeFlags::RECALL_ON_DATA_ACCESS);
|
|
||||||
}
|
|
||||||
if metadata.recall_on_open {
|
|
||||||
flags.insert(WindowsFileAttributeFlags::RECALL_ON_OPEN);
|
|
||||||
}
|
|
||||||
if metadata.reparse_point {
|
|
||||||
flags.insert(WindowsFileAttributeFlags::REPARSE_POINT);
|
|
||||||
}
|
|
||||||
if metadata.sparse_file {
|
|
||||||
flags.insert(WindowsFileAttributeFlags::SPARSE_FILE);
|
|
||||||
}
|
|
||||||
if metadata.system {
|
|
||||||
flags.insert(WindowsFileAttributeFlags::SYSTEM);
|
|
||||||
}
|
|
||||||
if metadata.temporary {
|
|
||||||
flags.insert(WindowsFileAttributeFlags::TEMPORARY);
|
|
||||||
}
|
|
||||||
|
|
||||||
flags.bits()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
bitflags! {
    /// Bitset of Windows file attributes.
    ///
    /// Values mirror the Win32 `FILE_ATTRIBUTE_*` constants (e.g.
    /// `FILE_ATTRIBUTE_ARCHIVE` = 0x20) — assumed from the matching hex
    /// values; TODO confirm against the winapi headers.
    struct WindowsFileAttributeFlags: u32 {
        const ARCHIVE = 0x20;
        const COMPRESSED = 0x800;
        const ENCRYPTED = 0x4000;
        const HIDDEN = 0x2;
        const INTEGRITY_SYSTEM = 0x8000;
        const NORMAL = 0x80;
        const NOT_CONTENT_INDEXED = 0x2000;
        const NO_SCRUB_DATA = 0x20000;
        const OFFLINE = 0x1000;
        const RECALL_ON_DATA_ACCESS = 0x400000;
        const RECALL_ON_OPEN = 0x40000;
        const REPARSE_POINT = 0x400;
        const SPARSE_FILE = 0x200;
        const SYSTEM = 0x4;
        const TEMPORARY = 0x100;
        // NOTE(review): VIRTUAL has no corresponding field on `WindowsMetadata`
        // and is never produced or consumed by the `From` conversions above.
        const VIRTUAL = 0x10000;
    }
}
|
|
@ -1,294 +0,0 @@
|
|||||||
use bitflags::bitflags;
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
|
|
||||||
/// Options controlling how permissions are applied across one or more paths.
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[serde(default, deny_unknown_fields, rename_all = "snake_case")]
pub struct SetPermissionsOptions {
    /// Whether or not to exclude symlinks from traversal entirely, meaning that permissions will
    /// not be set on symlinks (usually resolving the symlink and setting the permission of the
    /// referenced file or directory) that are explicitly provided or show up during recursion.
    pub exclude_symlinks: bool,

    /// Whether or not to traverse symlinks when recursively setting permissions. Note that this
    /// does NOT influence setting permissions when encountering a symlink as most platforms will
    /// resolve the symlink before setting permissions.
    pub follow_symlinks: bool,

    /// Whether or not to set the permissions of the file hierarchies rooted in the paths, instead
    /// of just the paths themselves.
    pub recursive: bool,
}

#[cfg(feature = "schemars")]
impl SetPermissionsOptions {
    /// Produces the JSON schema for [`SetPermissionsOptions`].
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(SetPermissionsOptions)
    }
}
|
|
||||||
|
|
||||||
/// Represents permissions to apply to some path on a remote machine
///
/// When used to set permissions on a file, directory, or symlink,
/// only fields that are set (not `None`) will be applied.
///
/// On `Unix` platforms, this translates directly into the mode that
/// you would find with `chmod`. On all other platforms, this uses the
/// write flags to determine whether or not to set the readonly status.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct Permissions {
    /// Represents whether or not owner can read from the file
    pub owner_read: Option<bool>,

    /// Represents whether or not owner can write to the file
    pub owner_write: Option<bool>,

    /// Represents whether or not owner can execute the file
    pub owner_exec: Option<bool>,

    /// Represents whether or not associated group can read from the file
    pub group_read: Option<bool>,

    /// Represents whether or not associated group can write to the file
    pub group_write: Option<bool>,

    /// Represents whether or not associated group can execute the file
    pub group_exec: Option<bool>,

    /// Represents whether or not other can read from the file
    pub other_read: Option<bool>,

    /// Represents whether or not other can write to the file
    pub other_write: Option<bool>,

    /// Represents whether or not other can execute the file
    pub other_exec: Option<bool>,
}
|
|
||||||
|
|
||||||
impl Permissions {
|
|
||||||
/// Creates a set of [`Permissions`] that indicate readonly status.
|
|
||||||
///
|
|
||||||
/// ```
|
|
||||||
/// use distant_core::protocol::Permissions;
|
|
||||||
///
|
|
||||||
/// let permissions = Permissions::readonly();
|
|
||||||
/// assert_eq!(permissions.is_readonly(), Some(true));
|
|
||||||
/// assert_eq!(permissions.is_writable(), Some(false));
|
|
||||||
/// ```
|
|
||||||
pub fn readonly() -> Self {
|
|
||||||
Self {
|
|
||||||
owner_write: Some(false),
|
|
||||||
group_write: Some(false),
|
|
||||||
other_write: Some(false),
|
|
||||||
|
|
||||||
owner_read: Some(true),
|
|
||||||
group_read: Some(true),
|
|
||||||
other_read: Some(true),
|
|
||||||
|
|
||||||
owner_exec: None,
|
|
||||||
group_exec: None,
|
|
||||||
other_exec: None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
/// Creates a set of [`Permissions`] that indicate globally writable status.
|
|
||||||
///
|
|
||||||
/// ```
|
|
||||||
/// use distant_core::protocol::Permissions;
|
|
||||||
///
|
|
||||||
/// let permissions = Permissions::writable();
|
|
||||||
/// assert_eq!(permissions.is_readonly(), Some(false));
|
|
||||||
/// assert_eq!(permissions.is_writable(), Some(true));
|
|
||||||
/// ```
|
|
||||||
pub fn writable() -> Self {
|
|
||||||
Self {
|
|
||||||
owner_write: Some(true),
|
|
||||||
group_write: Some(true),
|
|
||||||
other_write: Some(true),
|
|
||||||
|
|
||||||
owner_read: Some(true),
|
|
||||||
group_read: Some(true),
|
|
||||||
other_read: Some(true),
|
|
||||||
|
|
||||||
owner_exec: None,
|
|
||||||
group_exec: None,
|
|
||||||
other_exec: None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns true if the permission set has a value specified for each permission (no `None`
|
|
||||||
/// settings).
|
|
||||||
pub fn is_complete(&self) -> bool {
|
|
||||||
self.owner_read.is_some()
|
|
||||||
&& self.owner_write.is_some()
|
|
||||||
&& self.owner_exec.is_some()
|
|
||||||
&& self.group_read.is_some()
|
|
||||||
&& self.group_write.is_some()
|
|
||||||
&& self.group_exec.is_some()
|
|
||||||
&& self.other_read.is_some()
|
|
||||||
&& self.other_write.is_some()
|
|
||||||
&& self.other_exec.is_some()
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns `true` if permissions represent readonly, `false` if permissions represent
|
|
||||||
/// writable, and `None` if no permissions have been set to indicate either status.
|
|
||||||
#[inline]
|
|
||||||
pub fn is_readonly(&self) -> Option<bool> {
|
|
||||||
// Negate the writable status to indicate whether or not readonly
|
|
||||||
self.is_writable().map(|x| !x)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns `true` if permissions represent ability to write, `false` if permissions represent
|
|
||||||
/// inability to write, and `None` if no permissions have been set to indicate either status.
|
|
||||||
#[inline]
|
|
||||||
pub fn is_writable(&self) -> Option<bool> {
|
|
||||||
self.owner_write
|
|
||||||
.zip(self.group_write)
|
|
||||||
.zip(self.other_write)
|
|
||||||
.map(|((owner, group), other)| owner || group || other)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Applies `other` settings to `self`, overwriting any of the permissions in `self` with `other`.
|
|
||||||
#[inline]
|
|
||||||
pub fn apply_from(&mut self, other: &Self) {
|
|
||||||
macro_rules! apply {
|
|
||||||
($key:ident) => {{
|
|
||||||
if let Some(value) = other.$key {
|
|
||||||
self.$key = Some(value);
|
|
||||||
}
|
|
||||||
}};
|
|
||||||
}
|
|
||||||
|
|
||||||
apply!(owner_read);
|
|
||||||
apply!(owner_write);
|
|
||||||
apply!(owner_exec);
|
|
||||||
apply!(group_read);
|
|
||||||
apply!(group_write);
|
|
||||||
apply!(group_exec);
|
|
||||||
apply!(other_read);
|
|
||||||
apply!(other_write);
|
|
||||||
apply!(other_exec);
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Applies `self` settings to `other`, overwriting any of the permissions in `other` with
|
|
||||||
/// `self`.
|
|
||||||
#[inline]
|
|
||||||
pub fn apply_to(&self, other: &mut Self) {
|
|
||||||
Self::apply_from(other, self)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Converts a Unix `mode` into the permission set.
|
|
||||||
pub fn from_unix_mode(mode: u32) -> Self {
|
|
||||||
let flags = UnixFilePermissionFlags::from_bits_truncate(mode);
|
|
||||||
Self {
|
|
||||||
owner_read: Some(flags.contains(UnixFilePermissionFlags::OWNER_READ)),
|
|
||||||
owner_write: Some(flags.contains(UnixFilePermissionFlags::OWNER_WRITE)),
|
|
||||||
owner_exec: Some(flags.contains(UnixFilePermissionFlags::OWNER_EXEC)),
|
|
||||||
group_read: Some(flags.contains(UnixFilePermissionFlags::GROUP_READ)),
|
|
||||||
group_write: Some(flags.contains(UnixFilePermissionFlags::GROUP_WRITE)),
|
|
||||||
group_exec: Some(flags.contains(UnixFilePermissionFlags::GROUP_EXEC)),
|
|
||||||
other_read: Some(flags.contains(UnixFilePermissionFlags::OTHER_READ)),
|
|
||||||
other_write: Some(flags.contains(UnixFilePermissionFlags::OTHER_WRITE)),
|
|
||||||
other_exec: Some(flags.contains(UnixFilePermissionFlags::OTHER_EXEC)),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Converts to a Unix `mode` from a permission set. For any missing setting, a 0 bit is used.
|
|
||||||
pub fn to_unix_mode(&self) -> u32 {
|
|
||||||
let mut flags = UnixFilePermissionFlags::empty();
|
|
||||||
|
|
||||||
macro_rules! is_true {
|
|
||||||
($opt:expr) => {{
|
|
||||||
$opt.is_some() && $opt.unwrap()
|
|
||||||
}};
|
|
||||||
}
|
|
||||||
|
|
||||||
if is_true!(self.owner_read) {
|
|
||||||
flags.insert(UnixFilePermissionFlags::OWNER_READ);
|
|
||||||
}
|
|
||||||
if is_true!(self.owner_write) {
|
|
||||||
flags.insert(UnixFilePermissionFlags::OWNER_WRITE);
|
|
||||||
}
|
|
||||||
if is_true!(self.owner_exec) {
|
|
||||||
flags.insert(UnixFilePermissionFlags::OWNER_EXEC);
|
|
||||||
}
|
|
||||||
|
|
||||||
if is_true!(self.group_read) {
|
|
||||||
flags.insert(UnixFilePermissionFlags::GROUP_READ);
|
|
||||||
}
|
|
||||||
if is_true!(self.group_write) {
|
|
||||||
flags.insert(UnixFilePermissionFlags::GROUP_WRITE);
|
|
||||||
}
|
|
||||||
if is_true!(self.group_exec) {
|
|
||||||
flags.insert(UnixFilePermissionFlags::GROUP_EXEC);
|
|
||||||
}
|
|
||||||
|
|
||||||
if is_true!(self.other_read) {
|
|
||||||
flags.insert(UnixFilePermissionFlags::OTHER_READ);
|
|
||||||
}
|
|
||||||
if is_true!(self.other_write) {
|
|
||||||
flags.insert(UnixFilePermissionFlags::OTHER_WRITE);
|
|
||||||
}
|
|
||||||
if is_true!(self.other_exec) {
|
|
||||||
flags.insert(UnixFilePermissionFlags::OTHER_EXEC);
|
|
||||||
}
|
|
||||||
|
|
||||||
flags.bits()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(feature = "schemars")]
impl Permissions {
    /// Produces the JSON schema for [`Permissions`].
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(Permissions)
    }
}
|
|
||||||
|
|
||||||
#[cfg(unix)]
|
|
||||||
impl From<std::fs::Permissions> for Permissions {
|
|
||||||
/// Converts [`std::fs::Permissions`] into [`Permissions`] using
|
|
||||||
/// [`std::os::unix::fs::PermissionsExt::mode`] to supply the bitset.
|
|
||||||
fn from(permissions: std::fs::Permissions) -> Self {
|
|
||||||
use std::os::unix::prelude::*;
|
|
||||||
Self::from_unix_mode(permissions.mode())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(not(unix))]
|
|
||||||
impl From<std::fs::Permissions> for Permissions {
|
|
||||||
/// Converts [`std::fs::Permissions`] into [`Permissions`] using the `readonly` flag.
|
|
||||||
///
|
|
||||||
/// This will not set executable flags, but will set all read and write flags with write flags
|
|
||||||
/// being `false` if `readonly`, otherwise set to `true`.
|
|
||||||
fn from(permissions: std::fs::Permissions) -> Self {
|
|
||||||
if permissions.readonly() {
|
|
||||||
Self::readonly()
|
|
||||||
} else {
|
|
||||||
Self::writable()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(unix)]
|
|
||||||
impl From<Permissions> for std::fs::Permissions {
|
|
||||||
/// Converts [`Permissions`] into [`std::fs::Permissions`] using
|
|
||||||
/// [`std::os::unix::fs::PermissionsExt::from_mode`].
|
|
||||||
fn from(permissions: Permissions) -> Self {
|
|
||||||
use std::os::unix::prelude::*;
|
|
||||||
std::fs::Permissions::from_mode(permissions.to_unix_mode())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
bitflags! {
    /// Bitset of the standard Unix rwx permission bits, matching the octal
    /// values used by `chmod` (owner/group/other read, write, execute).
    /// Special bits (setuid 0o4000, setgid 0o2000, sticky 0o1000) are
    /// intentionally not modeled.
    struct UnixFilePermissionFlags: u32 {
        const OWNER_READ = 0o400;
        const OWNER_WRITE = 0o200;
        const OWNER_EXEC = 0o100;
        const GROUP_READ = 0o40;
        const GROUP_WRITE = 0o20;
        const GROUP_EXEC = 0o10;
        const OTHER_READ = 0o4;
        const OTHER_WRITE = 0o2;
        const OTHER_EXEC = 0o1;
    }
}
|
|
@ -1,140 +0,0 @@
|
|||||||
use std::fmt;
|
|
||||||
use std::num::ParseIntError;
|
|
||||||
use std::str::FromStr;
|
|
||||||
|
|
||||||
use derive_more::{Display, Error};
|
|
||||||
use portable_pty::PtySize as PortablePtySize;
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
|
|
||||||
/// Represents the size associated with a remote PTY
#[derive(Copy, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct PtySize {
    /// Number of lines of text
    pub rows: u16,

    /// Number of columns of text
    pub cols: u16,

    /// Width of a cell in pixels. Note that some systems never fill this value and ignore it.
    #[serde(default)]
    pub pixel_width: u16,

    /// Height of a cell in pixels. Note that some systems never fill this value and ignore it.
    #[serde(default)]
    pub pixel_height: u16,
}

impl PtySize {
    /// Creates new size using just rows and columns
    ///
    /// Pixel dimensions are filled from the [`Default`] impl (0 for both).
    pub fn from_rows_and_cols(rows: u16, cols: u16) -> Self {
        Self {
            rows,
            cols,
            ..Default::default()
        }
    }
}

#[cfg(feature = "schemars")]
impl PtySize {
    /// Produces the JSON schema for [`PtySize`].
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(PtySize)
    }
}
|
|
||||||
|
|
||||||
impl From<PortablePtySize> for PtySize {
    /// Field-for-field copy from the `portable_pty` size type.
    fn from(size: PortablePtySize) -> Self {
        Self {
            rows: size.rows,
            cols: size.cols,
            pixel_width: size.pixel_width,
            pixel_height: size.pixel_height,
        }
    }
}

impl From<PtySize> for PortablePtySize {
    /// Field-for-field copy back into the `portable_pty` size type.
    fn from(size: PtySize) -> Self {
        Self {
            rows: size.rows,
            cols: size.cols,
            pixel_width: size.pixel_width,
            pixel_height: size.pixel_height,
        }
    }
}
|
|
||||||
|
|
||||||
impl fmt::Display for PtySize {
|
|
||||||
/// Prints out `rows,cols[,pixel_width,pixel_height]` where the
|
|
||||||
/// pixel width and pixel height are only included if either
|
|
||||||
/// one of them is not zero
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
||||||
write!(f, "{},{}", self.rows, self.cols)?;
|
|
||||||
if self.pixel_width > 0 || self.pixel_height > 0 {
|
|
||||||
write!(f, ",{},{}", self.pixel_width, self.pixel_height)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Default for PtySize {
    /// Defaults to a conventional 24x80 terminal with no pixel dimensions.
    ///
    /// NOTE: `from_rows_and_cols` spreads from this impl via
    /// `..Default::default()`, so this must remain a literal constructor.
    fn default() -> Self {
        PtySize {
            rows: 24,
            cols: 80,
            pixel_width: 0,
            pixel_height: 0,
        }
    }
}
|
|
||||||
|
|
||||||
/// Errors that can occur when parsing a [`PtySize`] from a string.
#[derive(Clone, Debug, PartialEq, Eq, Display, Error)]
pub enum PtySizeParseError {
    /// No rows token was present in the input
    MissingRows,
    /// No columns token was present in the input
    MissingColumns,
    /// Rows token failed to parse as an integer
    InvalidRows(ParseIntError),
    /// Columns token failed to parse as an integer
    InvalidColumns(ParseIntError),
    /// Pixel width token failed to parse as an integer
    InvalidPixelWidth(ParseIntError),
    /// Pixel height token failed to parse as an integer
    InvalidPixelHeight(ParseIntError),
}
|
|
||||||
|
|
||||||
impl FromStr for PtySize {
|
|
||||||
type Err = PtySizeParseError;
|
|
||||||
|
|
||||||
/// Attempts to parse a str into PtySize using one of the following formats:
|
|
||||||
///
|
|
||||||
/// * rows,cols (defaults to 0 for pixel_width & pixel_height)
|
|
||||||
/// * rows,cols,pixel_width,pixel_height
|
|
||||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
|
||||||
let mut tokens = s.split(',');
|
|
||||||
|
|
||||||
Ok(Self {
|
|
||||||
rows: tokens
|
|
||||||
.next()
|
|
||||||
.ok_or(PtySizeParseError::MissingRows)?
|
|
||||||
.trim()
|
|
||||||
.parse()
|
|
||||||
.map_err(PtySizeParseError::InvalidRows)?,
|
|
||||||
cols: tokens
|
|
||||||
.next()
|
|
||||||
.ok_or(PtySizeParseError::MissingColumns)?
|
|
||||||
.trim()
|
|
||||||
.parse()
|
|
||||||
.map_err(PtySizeParseError::InvalidColumns)?,
|
|
||||||
pixel_width: tokens
|
|
||||||
.next()
|
|
||||||
.map(|s| s.trim().parse())
|
|
||||||
.transpose()
|
|
||||||
.map_err(PtySizeParseError::InvalidPixelWidth)?
|
|
||||||
.unwrap_or(0),
|
|
||||||
pixel_height: tokens
|
|
||||||
.next()
|
|
||||||
.map(|s| s.trim().parse())
|
|
||||||
.transpose()
|
|
||||||
.map_err(PtySizeParseError::InvalidPixelHeight)?
|
|
||||||
.unwrap_or(0),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,425 +0,0 @@
|
|||||||
use std::borrow::Cow;
|
|
||||||
use std::collections::HashSet;
|
|
||||||
use std::path::PathBuf;
|
|
||||||
use std::str::FromStr;
|
|
||||||
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
|
|
||||||
use super::FileType;
|
|
||||||
|
|
||||||
/// Id associated with a search
|
|
||||||
pub type SearchId = u32;
|
|
||||||
|
|
||||||
/// Represents a query to perform against the filesystem
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct SearchQuery {
    /// Kind of data to examine using condition
    pub target: SearchQueryTarget,

    /// Condition to meet to be considered a match
    pub condition: SearchQueryCondition,

    /// Paths in which to perform the query
    pub paths: Vec<PathBuf>,

    /// Options to apply to the query
    #[serde(default)]
    pub options: SearchQueryOptions,
}

#[cfg(feature = "schemars")]
impl SearchQuery {
    /// Produces the JSON schema for [`SearchQuery`].
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(SearchQuery)
    }
}

impl FromStr for SearchQuery {
    type Err = serde_json::error::Error;

    /// Parses search query from a JSON string
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        serde_json::from_str(s)
    }
}
|
|
||||||
|
|
||||||
/// Kind of data to examine using conditions
#[derive(Copy, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[serde(rename_all = "snake_case")]
pub enum SearchQueryTarget {
    /// Checks path of file, directory, or symlink
    Path,

    /// Checks contents of files
    Contents,
}

#[cfg(feature = "schemars")]
impl SearchQueryTarget {
    /// Produces the JSON schema for [`SearchQueryTarget`].
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(SearchQueryTarget)
    }
}
|
|
||||||
|
|
||||||
/// Condition used to find a match in a search query
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[serde(rename_all = "snake_case", deny_unknown_fields, tag = "type")]
pub enum SearchQueryCondition {
    /// Text is found anywhere (all regex patterns are escaped)
    Contains { value: String },

    /// Ends with some text (all regex patterns are escaped)
    EndsWith { value: String },

    /// Matches some text exactly (all regex patterns are escaped)
    Equals { value: String },

    /// Any of the conditions match
    Or { value: Vec<SearchQueryCondition> },

    /// Matches some regex
    Regex { value: String },

    /// Begins with some text (all regex patterns are escaped)
    StartsWith { value: String },
}
|
|
||||||
|
|
||||||
impl SearchQueryCondition {
|
|
||||||
/// Creates a new instance with `Contains` variant
|
|
||||||
pub fn contains(value: impl Into<String>) -> Self {
|
|
||||||
Self::Contains {
|
|
||||||
value: value.into(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Creates a new instance with `EndsWith` variant
|
|
||||||
pub fn ends_with(value: impl Into<String>) -> Self {
|
|
||||||
Self::EndsWith {
|
|
||||||
value: value.into(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Creates a new instance with `Equals` variant
|
|
||||||
pub fn equals(value: impl Into<String>) -> Self {
|
|
||||||
Self::Equals {
|
|
||||||
value: value.into(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Creates a new instance with `Or` variant
|
|
||||||
pub fn or<I, C>(value: I) -> Self
|
|
||||||
where
|
|
||||||
I: IntoIterator<Item = C>,
|
|
||||||
C: Into<SearchQueryCondition>,
|
|
||||||
{
|
|
||||||
Self::Or {
|
|
||||||
value: value.into_iter().map(|s| s.into()).collect(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Creates a new instance with `Regex` variant
|
|
||||||
pub fn regex(value: impl Into<String>) -> Self {
|
|
||||||
Self::Regex {
|
|
||||||
value: value.into(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Creates a new instance with `StartsWith` variant
|
|
||||||
pub fn starts_with(value: impl Into<String>) -> Self {
|
|
||||||
Self::StartsWith {
|
|
||||||
value: value.into(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Converts the condition in a regex string
|
|
||||||
pub fn to_regex_string(&self) -> String {
|
|
||||||
match self {
|
|
||||||
Self::Contains { value } => regex::escape(value),
|
|
||||||
Self::EndsWith { value } => format!(r"{}$", regex::escape(value)),
|
|
||||||
Self::Equals { value } => format!(r"^{}$", regex::escape(value)),
|
|
||||||
Self::Regex { value } => value.to_string(),
|
|
||||||
Self::StartsWith { value } => format!(r"^{}", regex::escape(value)),
|
|
||||||
Self::Or { value } => {
|
|
||||||
let mut s = String::new();
|
|
||||||
for (i, condition) in value.iter().enumerate() {
|
|
||||||
if i > 0 {
|
|
||||||
s.push('|');
|
|
||||||
}
|
|
||||||
s.push_str(&condition.to_regex_string());
|
|
||||||
}
|
|
||||||
s
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(feature = "schemars")]
impl SearchQueryCondition {
    /// Produces the JSON schema for [`SearchQueryCondition`].
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(SearchQueryCondition)
    }
}

impl FromStr for SearchQueryCondition {
    type Err = std::convert::Infallible;

    /// Treats the entire input string as a regex pattern, producing a `Regex`
    /// condition; this never fails. (The previous comment claiming JSON
    /// parsing was a copy-paste from [`SearchQuery`]'s `FromStr` — no JSON is
    /// involved here.)
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Ok(Self::regex(s))
    }
}
|
|
||||||
|
|
||||||
/// Options associated with a search query
#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[serde(default)]
pub struct SearchQueryOptions {
    /// Restrict search to only these file types (otherwise all are allowed).
    pub allowed_file_types: HashSet<FileType>,

    /// Regex to use to filter paths being searched to only those that match the include condition.
    pub include: Option<SearchQueryCondition>,

    /// Regex to use to filter paths being searched to only those that do not match the exclude
    /// condition.
    pub exclude: Option<SearchQueryCondition>,

    /// If true, will search upward through parent directories rather than the traditional downward
    /// search that recurses through all children directories.
    ///
    /// Note that this will use maximum depth to apply to the reverse direction, and will only look
    /// through each ancestor directory's immediate entries. In other words, this will not result
    /// in recursing through sibling directories.
    ///
    /// An upward search will ALWAYS search the contents of a directory, so this means providing a
    /// path to a directory will search its entries EVEN if the max_depth is 0.
    pub upward: bool,

    /// Search should follow symbolic links.
    pub follow_symbolic_links: bool,

    /// Maximum results to return before stopping the query.
    pub limit: Option<u64>,

    /// Maximum depth (directories) to search
    ///
    /// The smallest depth is 0 and always corresponds to the path given to the new function on
    /// this type. Its direct descendents have depth 1, and their descendents have depth 2, and so
    /// on.
    ///
    /// Note that this will not simply filter the entries of the iterator, but it will actually
    /// avoid descending into directories when the depth is exceeded.
    pub max_depth: Option<u64>,

    /// Amount of results to batch before sending back excluding final submission that will always
    /// include the remaining results even if less than pagination request.
    pub pagination: Option<u64>,
}

#[cfg(feature = "schemars")]
impl SearchQueryOptions {
    /// Produces the JSON schema for [`SearchQueryOptions`].
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(SearchQueryOptions)
    }
}
|
|
||||||
|
|
||||||
/// Represents a match for a search query
|
|
||||||
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
|
|
||||||
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
|
|
||||||
#[serde(rename_all = "snake_case", deny_unknown_fields, tag = "type")]
|
|
||||||
pub enum SearchQueryMatch {
|
|
||||||
/// Matches part of a file's path
|
|
||||||
Path(SearchQueryPathMatch),
|
|
||||||
|
|
||||||
/// Matches part of a file's contents
|
|
||||||
Contents(SearchQueryContentsMatch),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl SearchQueryMatch {
|
|
||||||
pub fn into_path_match(self) -> Option<SearchQueryPathMatch> {
|
|
||||||
match self {
|
|
||||||
Self::Path(x) => Some(x),
|
|
||||||
_ => None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn into_contents_match(self) -> Option<SearchQueryContentsMatch> {
|
|
||||||
match self {
|
|
||||||
Self::Contents(x) => Some(x),
|
|
||||||
_ => None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(feature = "schemars")]
|
|
||||||
impl SearchQueryMatch {
|
|
||||||
pub fn root_schema() -> schemars::schema::RootSchema {
|
|
||||||
schemars::schema_for!(SearchQueryMatch)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Represents details for a match on a path
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct SearchQueryPathMatch {
    /// Path associated with the match
    pub path: PathBuf,

    /// Collection of matches tied to `path` where each submatch's byte offset is relative to
    /// `path`
    pub submatches: Vec<SearchQuerySubmatch>,
}

#[cfg(feature = "schemars")]
impl SearchQueryPathMatch {
    /// Produces the JSON schema for [`SearchQueryPathMatch`].
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(SearchQueryPathMatch)
    }
}
|
|
||||||
|
|
||||||
/// Represents details for a match on a file's contents
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct SearchQueryContentsMatch {
    /// Path to file whose contents match
    pub path: PathBuf,

    /// Line(s) that matched
    pub lines: SearchQueryMatchData,

    /// Line number where match starts (base index 1)
    pub line_number: u64,

    /// Absolute byte offset corresponding to the start of `lines` in the data being searched
    pub absolute_offset: u64,

    /// Collection of matches tied to `lines` where each submatch's byte offset is relative to
    /// `lines` and not the overall content
    pub submatches: Vec<SearchQuerySubmatch>,
}
|
|
||||||
|
|
||||||
#[cfg(feature = "schemars")]
impl SearchQueryContentsMatch {
    /// Returns the root JSON schema describing [`SearchQueryContentsMatch`].
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(SearchQueryContentsMatch)
    }
}
|
|
||||||
|
|
||||||
/// An individual match within a larger path or contents match
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct SearchQuerySubmatch {
    /// Content matched by query
    pub r#match: SearchQueryMatchData,

    /// Byte offset representing start of submatch (inclusive)
    pub start: u64,

    /// Byte offset representing end of submatch (exclusive)
    pub end: u64,
}
|
|
||||||
|
|
||||||
#[cfg(feature = "schemars")]
impl SearchQuerySubmatch {
    /// Returns the root JSON schema describing [`SearchQuerySubmatch`].
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(SearchQuerySubmatch)
    }
}
|
|
||||||
|
|
||||||
/// Raw data captured by a search match, either valid UTF-8 text or arbitrary bytes.
///
/// Serialized as an adjacently tagged enum, e.g. `{"type": "text", "value": "..."}`.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[serde(
    rename_all = "snake_case",
    deny_unknown_fields,
    tag = "type",
    content = "value"
)]
pub enum SearchQueryMatchData {
    /// Match represented as UTF-8 text
    Text(String),

    /// Match represented as bytes
    Bytes(Vec<u8>),
}
|
|
||||||
|
|
||||||
impl SearchQueryMatchData {
|
|
||||||
/// Creates a new instance with `Text` variant
|
|
||||||
pub fn text(value: impl Into<String>) -> Self {
|
|
||||||
Self::Text(value.into())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Creates a new instance with `Bytes` variant
|
|
||||||
pub fn bytes(value: impl Into<Vec<u8>>) -> Self {
|
|
||||||
Self::Bytes(value.into())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns the UTF-8 str reference to the data, if is valid UTF-8
|
|
||||||
pub fn to_str(&self) -> Option<&str> {
|
|
||||||
match self {
|
|
||||||
Self::Text(x) => Some(x),
|
|
||||||
Self::Bytes(x) => std::str::from_utf8(x).ok(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Converts data to a UTF-8 string, replacing any invalid UTF-8 sequences with
|
|
||||||
/// [`U+FFFD REPLACEMENT CHARACTER`](https://doc.rust-lang.org/nightly/core/char/const.REPLACEMENT_CHARACTER.html)
|
|
||||||
pub fn to_string_lossy(&self) -> Cow<'_, str> {
|
|
||||||
match self {
|
|
||||||
Self::Text(x) => Cow::Borrowed(x),
|
|
||||||
Self::Bytes(x) => String::from_utf8_lossy(x),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(feature = "schemars")]
impl SearchQueryMatchData {
    /// Returns the root JSON schema describing [`SearchQueryMatchData`].
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(SearchQueryMatchData)
    }
}
|
|
||||||
|
|
||||||
#[cfg(test)]
mod tests {
    use super::*;

    mod search_query_condition {
        use test_log::test;

        use super::*;

        #[test]
        fn to_regex_string_should_convert_to_appropriate_regex_and_escape_as_needed() {
            // Each pair is (condition, expected regex); checking them in a
            // single table keeps every escaping rule covered in one place.
            let cases = [
                (
                    SearchQueryCondition::contains("t^es$t"),
                    r"t\^es\$t".to_string(),
                ),
                (
                    SearchQueryCondition::ends_with("t^es$t"),
                    r"t\^es\$t$".to_string(),
                ),
                (
                    SearchQueryCondition::equals("t^es$t"),
                    r"^t\^es\$t$".to_string(),
                ),
                (
                    SearchQueryCondition::or([
                        SearchQueryCondition::contains("t^es$t"),
                        SearchQueryCondition::equals("t^es$t"),
                        SearchQueryCondition::regex("^test$"),
                    ]),
                    r"t\^es\$t|^t\^es\$t$|^test$".to_string(),
                ),
                (SearchQueryCondition::regex("test"), "test".to_string()),
                (
                    SearchQueryCondition::starts_with("t^es$t"),
                    r"^t\^es\$t".to_string(),
                ),
            ];

            for (condition, expected) in cases {
                assert_eq!(condition.to_regex_string(), expected);
            }
        }
    }
}
|
|
@ -1,59 +0,0 @@
|
|||||||
use std::env;
|
|
||||||
use std::path::PathBuf;
|
|
||||||
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
|
|
||||||
/// Represents information about a system
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct SystemInfo {
    /// Family of the operating system as described in
    /// https://doc.rust-lang.org/std/env/consts/constant.FAMILY.html
    pub family: String,

    /// Name of the specific operating system as described in
    /// https://doc.rust-lang.org/std/env/consts/constant.OS.html
    pub os: String,

    /// Architecture of the CPU as described in
    /// https://doc.rust-lang.org/std/env/consts/constant.ARCH.html
    pub arch: String,

    /// Current working directory of the running server process
    pub current_dir: PathBuf,

    /// Primary separator for path components for the current platform
    /// as defined in https://doc.rust-lang.org/std/path/constant.MAIN_SEPARATOR.html
    pub main_separator: char,

    /// Name of the user running the server process
    pub username: String,

    /// Default shell tied to user running the server process
    pub shell: String,
}
|
|
||||||
|
|
||||||
#[cfg(feature = "schemars")]
impl SystemInfo {
    /// Returns the root JSON schema describing [`SystemInfo`].
    pub fn root_schema() -> schemars::schema::RootSchema {
        schemars::schema_for!(SystemInfo)
    }
}
|
|
||||||
|
|
||||||
impl Default for SystemInfo {
|
|
||||||
fn default() -> Self {
|
|
||||||
Self {
|
|
||||||
family: env::consts::FAMILY.to_string(),
|
|
||||||
os: env::consts::OS.to_string(),
|
|
||||||
arch: env::consts::ARCH.to_string(),
|
|
||||||
current_dir: env::current_dir().unwrap_or_default(),
|
|
||||||
main_separator: std::path::MAIN_SEPARATOR,
|
|
||||||
username: whoami::username(),
|
|
||||||
shell: if cfg!(windows) {
|
|
||||||
env::var("ComSpec").unwrap_or_else(|_| String::from("cmd.exe"))
|
|
||||||
} else {
|
|
||||||
env::var("SHELL").unwrap_or_else(|_| String::from("/bin/sh"))
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
@ -0,0 +1,29 @@
|
|||||||
|
[package]
name = "distant-protocol"
description = "Protocol library for distant, providing data structures used between the client and server"
categories = ["data-structures"]
keywords = ["protocol"]
version = "0.20.0-alpha.7"
authors = ["Chip Senkbeil <chip@senkbeil.org>"]
edition = "2021"
homepage = "https://github.com/chipsenkbeil/distant"
repository = "https://github.com/chipsenkbeil/distant"
readme = "README.md"
license = "MIT OR Apache-2.0"

[features]
default = []
# NOTE(review): presumably exposes test-support helpers to downstream crates — confirm usage
tests = []

[dependencies]
bitflags = "2.0.2"
derive_more = { version = "0.99.17", default-features = false, features = ["deref", "deref_mut", "display", "from", "error", "into", "into_iterator", "is_variant"] }
regex = "1.7.3"
serde = { version = "1.0.159", features = ["derive"] }
serde_bytes = "0.11.9"
strum = { version = "0.24.1", features = ["derive"] }

# Used only by unit tests for (de)serialization round-trips
[dev-dependencies]
rmp = "0.8.11"
rmp-serde = "1.1.1"
serde_json = "1.0.96"
|
@ -0,0 +1,29 @@
|
|||||||
|
mod capabilities;
|
||||||
|
mod change;
|
||||||
|
mod cmd;
|
||||||
|
mod error;
|
||||||
|
mod filesystem;
|
||||||
|
mod metadata;
|
||||||
|
mod permissions;
|
||||||
|
mod pty;
|
||||||
|
mod search;
|
||||||
|
mod system;
|
||||||
|
mod version;
|
||||||
|
|
||||||
|
pub use capabilities::*;
|
||||||
|
pub use change::*;
|
||||||
|
pub use cmd::*;
|
||||||
|
pub use error::*;
|
||||||
|
pub use filesystem::*;
|
||||||
|
pub use metadata::*;
|
||||||
|
pub use permissions::*;
|
||||||
|
pub use pty::*;
|
||||||
|
pub use search::*;
|
||||||
|
pub use system::*;
|
||||||
|
pub use version::*;
|
||||||
|
|
||||||
|
/// Id for a remote process
|
||||||
|
pub type ProcessId = u32;
|
||||||
|
|
||||||
|
/// Version indicated by the tuple of (major, minor, patch).
|
||||||
|
pub type SemVer = (u8, u8, u8);
|
@ -0,0 +1,380 @@
|
|||||||
|
use std::cmp::Ordering;
|
||||||
|
use std::collections::HashSet;
|
||||||
|
use std::hash::{Hash, Hasher};
|
||||||
|
use std::ops::{BitAnd, BitOr, BitXor, Deref, DerefMut};
|
||||||
|
use std::str::FromStr;
|
||||||
|
|
||||||
|
use derive_more::{From, Into, IntoIterator};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use strum::{EnumMessage, IntoEnumIterator};
|
||||||
|
|
||||||
|
/// Represents the kinds of capabilities available.
|
||||||
|
pub use crate::request::RequestKind as CapabilityKind;
|
||||||
|
|
||||||
|
/// Set of supported capabilities for a server
///
/// Serialized transparently as the bare set of capabilities.
#[derive(Clone, Debug, From, Into, PartialEq, Eq, IntoIterator, Serialize, Deserialize)]
#[serde(transparent)]
pub struct Capabilities(#[into_iterator(owned, ref)] HashSet<Capability>);
|
||||||
|
|
||||||
|
impl Capabilities {
|
||||||
|
/// Return set of capabilities encompassing all possible capabilities
|
||||||
|
pub fn all() -> Self {
|
||||||
|
Self(CapabilityKind::iter().map(Capability::from).collect())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Return empty set of capabilities
|
||||||
|
pub fn none() -> Self {
|
||||||
|
Self(HashSet::new())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns true if the capability with described kind is included
|
||||||
|
pub fn contains(&self, kind: impl AsRef<str>) -> bool {
|
||||||
|
let cap = Capability {
|
||||||
|
kind: kind.as_ref().to_string(),
|
||||||
|
description: String::new(),
|
||||||
|
};
|
||||||
|
self.0.contains(&cap)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Adds the specified capability to the set of capabilities
|
||||||
|
///
|
||||||
|
/// * If the set did not have this capability, returns `true`
|
||||||
|
/// * If the set did have this capability, returns `false`
|
||||||
|
pub fn insert(&mut self, cap: impl Into<Capability>) -> bool {
|
||||||
|
self.0.insert(cap.into())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Removes the capability with the described kind, returning the capability
|
||||||
|
pub fn take(&mut self, kind: impl AsRef<str>) -> Option<Capability> {
|
||||||
|
let cap = Capability {
|
||||||
|
kind: kind.as_ref().to_string(),
|
||||||
|
description: String::new(),
|
||||||
|
};
|
||||||
|
self.0.take(&cap)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Removes the capability with the described kind, returning true if it existed
|
||||||
|
pub fn remove(&mut self, kind: impl AsRef<str>) -> bool {
|
||||||
|
let cap = Capability {
|
||||||
|
kind: kind.as_ref().to_string(),
|
||||||
|
description: String::new(),
|
||||||
|
};
|
||||||
|
self.0.remove(&cap)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Converts into vec of capabilities sorted by kind
|
||||||
|
pub fn into_sorted_vec(self) -> Vec<Capability> {
|
||||||
|
let mut this = self.0.into_iter().collect::<Vec<_>>();
|
||||||
|
|
||||||
|
this.sort_unstable();
|
||||||
|
|
||||||
|
this
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl AsRef<HashSet<Capability>> for Capabilities {
    /// Borrows the inner set of capabilities.
    fn as_ref(&self) -> &HashSet<Capability> {
        &self.0
    }
}

impl AsMut<HashSet<Capability>> for Capabilities {
    /// Mutably borrows the inner set of capabilities.
    fn as_mut(&mut self) -> &mut HashSet<Capability> {
        &mut self.0
    }
}

impl Deref for Capabilities {
    type Target = HashSet<Capability>;

    // Exposes HashSet's read API directly on Capabilities.
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl DerefMut for Capabilities {
    // Exposes HashSet's mutation API directly on Capabilities.
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
|
||||||
|
|
||||||
|
impl BitAnd for &Capabilities {
|
||||||
|
type Output = Capabilities;
|
||||||
|
|
||||||
|
fn bitand(self, rhs: Self) -> Self::Output {
|
||||||
|
Capabilities(self.0.bitand(&rhs.0))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl BitOr for &Capabilities {
|
||||||
|
type Output = Capabilities;
|
||||||
|
|
||||||
|
fn bitor(self, rhs: Self) -> Self::Output {
|
||||||
|
Capabilities(self.0.bitor(&rhs.0))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl BitOr<Capability> for &Capabilities {
|
||||||
|
type Output = Capabilities;
|
||||||
|
|
||||||
|
fn bitor(self, rhs: Capability) -> Self::Output {
|
||||||
|
let mut other = Capabilities::none();
|
||||||
|
other.0.insert(rhs);
|
||||||
|
|
||||||
|
self.bitor(&other)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl BitXor for &Capabilities {
|
||||||
|
type Output = Capabilities;
|
||||||
|
|
||||||
|
fn bitxor(self, rhs: Self) -> Self::Output {
|
||||||
|
Capabilities(self.0.bitxor(&rhs.0))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl FromIterator<Capability> for Capabilities {
|
||||||
|
fn from_iter<I: IntoIterator<Item = Capability>>(iter: I) -> Self {
|
||||||
|
let mut this = Capabilities::none();
|
||||||
|
|
||||||
|
for capability in iter {
|
||||||
|
this.0.insert(capability);
|
||||||
|
}
|
||||||
|
|
||||||
|
this
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Capability tied to a server. A capability is equivalent based on its kind and not description.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "snake_case", deny_unknown_fields)]
pub struct Capability {
    /// Label describing the kind of capability
    ///
    /// Compared case-insensitively for equality, ordering, and hashing.
    pub kind: String,

    /// Information about the capability
    ///
    /// Never participates in equality or hashing.
    pub description: String,
}
|
||||||
|
|
||||||
|
impl Capability {
|
||||||
|
/// Will convert the [`Capability`]'s `kind` into a known [`CapabilityKind`] if possible,
|
||||||
|
/// returning None if the capability is unknown
|
||||||
|
pub fn to_capability_kind(&self) -> Option<CapabilityKind> {
|
||||||
|
CapabilityKind::from_str(&self.kind).ok()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns true if the described capability is unknown
|
||||||
|
pub fn is_unknown(&self) -> bool {
|
||||||
|
self.to_capability_kind().is_none()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PartialEq for Capability {
    /// Capabilities are equal when their `kind` values match
    /// case-insensitively; `description` is deliberately ignored.
    fn eq(&self, other: &Self) -> bool {
        self.kind.eq_ignore_ascii_case(&other.kind)
    }
}

impl Eq for Capability {}

impl PartialOrd for Capability {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl Ord for Capability {
    /// Orders by lowercased `kind`, consistent with the case-insensitive
    /// `PartialEq` above.
    fn cmp(&self, other: &Self) -> Ordering {
        self.kind
            .to_ascii_lowercase()
            .cmp(&other.kind.to_ascii_lowercase())
    }
}

impl Hash for Capability {
    /// Hashes only the lowercased `kind`, keeping `Hash` consistent with the
    /// case-insensitive `PartialEq` — required for correct `HashSet` behavior.
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.kind.to_ascii_lowercase().hash(state);
    }
}
|
||||||
|
|
||||||
|
impl From<CapabilityKind> for Capability {
    /// Creates a new capability using the kind's default message
    fn from(kind: CapabilityKind) -> Self {
        // The strum-provided message doubles as the human-readable description;
        // kinds without one get an empty description.
        let description = kind
            .get_message()
            .map(ToString::to_string)
            .unwrap_or_default();

        Self {
            kind: kind.to_string(),
            description,
        }
    }
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;

    /// Capability fixture shared by every test below.
    fn sample_capability() -> Capability {
        Capability {
            kind: "some kind".to_string(),
            description: "some description".to_string(),
        }
    }

    /// Single-element capability set built from the shared fixture.
    fn sample_capabilities() -> Capabilities {
        [sample_capability()].into_iter().collect()
    }

    mod capabilities {
        use super::*;

        #[test]
        fn should_be_able_to_serialize_to_json() {
            let value = serde_json::to_value(sample_capabilities()).unwrap();
            assert_eq!(
                value,
                serde_json::json!([
                    {
                        "kind": "some kind",
                        "description": "some description",
                    }
                ])
            );
        }

        #[test]
        fn should_be_able_to_deserialize_from_json() {
            let value = serde_json::json!([
                {
                    "kind": "some kind",
                    "description": "some description",
                }
            ]);

            let capabilities: Capabilities = serde_json::from_value(value).unwrap();
            assert_eq!(capabilities, sample_capabilities());
        }

        #[test]
        fn should_be_able_to_serialize_to_msgpack() {
            // NOTE: We don't actually check the output here because it's an implementation detail
            // and could change as we change how serialization is done. This is merely to verify
            // that we can serialize since there are times when serde fails to serialize at
            // runtime.
            let _ = rmp_serde::encode::to_vec_named(&sample_capabilities()).unwrap();
        }

        #[test]
        fn should_be_able_to_deserialize_from_msgpack() {
            // NOTE: It may seem odd that we are serializing just to deserialize, but this is to
            // verify that we are not corrupting or preventing issues when serializing on a
            // client/server and then trying to deserialize on the other side. This has happened
            // enough times with minor changes that we need tests to verify.
            let buf = rmp_serde::encode::to_vec_named(&sample_capabilities()).unwrap();

            let capabilities: Capabilities = rmp_serde::decode::from_slice(&buf).unwrap();
            assert_eq!(capabilities, sample_capabilities());
        }
    }

    mod capability {
        use super::*;

        #[test]
        fn should_be_able_to_serialize_to_json() {
            let value = serde_json::to_value(sample_capability()).unwrap();
            assert_eq!(
                value,
                serde_json::json!({
                    "kind": "some kind",
                    "description": "some description",
                })
            );
        }

        #[test]
        fn should_be_able_to_deserialize_from_json() {
            let value = serde_json::json!({
                "kind": "some kind",
                "description": "some description",
            });

            let capability: Capability = serde_json::from_value(value).unwrap();
            assert_eq!(capability, sample_capability());
        }

        #[test]
        fn should_be_able_to_serialize_to_msgpack() {
            // NOTE: We don't actually check the output here because it's an implementation detail
            // and could change as we change how serialization is done. This is merely to verify
            // that we can serialize since there are times when serde fails to serialize at
            // runtime.
            let _ = rmp_serde::encode::to_vec_named(&sample_capability()).unwrap();
        }

        #[test]
        fn should_be_able_to_deserialize_from_msgpack() {
            // NOTE: It may seem odd that we are serializing just to deserialize, but this is to
            // verify that we are not corrupting or causing issues when serializing on a
            // client/server and then trying to deserialize on the other side. This has happened
            // enough times with minor changes that we need tests to verify.
            let buf = rmp_serde::encode::to_vec_named(&sample_capability()).unwrap();

            let capability: Capability = rmp_serde::decode::from_slice(&buf).unwrap();
            assert_eq!(capability, sample_capability());
        }
    }
}
|
@ -0,0 +1,380 @@
|
|||||||
|
use std::collections::HashSet;
|
||||||
|
use std::fmt;
|
||||||
|
use std::hash::{Hash, Hasher};
|
||||||
|
use std::iter::FromIterator;
|
||||||
|
use std::ops::{BitOr, Sub};
|
||||||
|
use std::path::PathBuf;
|
||||||
|
use std::str::FromStr;
|
||||||
|
|
||||||
|
use derive_more::{Deref, DerefMut, IntoIterator};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use strum::{EnumString, EnumVariantNames, VariantNames};
|
||||||
|
|
||||||
|
/// Change to one or more paths on the filesystem.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case", deny_unknown_fields)]
pub struct Change {
    /// Label describing the kind of change
    pub kind: ChangeKind,

    /// Paths that were changed
    pub paths: Vec<PathBuf>,
}
|
||||||
|
|
||||||
|
/// Represents a label attached to a [`Change`] that describes the kind of change.
///
/// This mirrors events seen from `incron`.
///
/// Serialized and parsed in snake_case (e.g. `close_write`).
#[derive(
    Copy,
    Clone,
    Debug,
    strum::Display,
    EnumString,
    EnumVariantNames,
    Hash,
    PartialEq,
    Eq,
    PartialOrd,
    Ord,
    Serialize,
    Deserialize,
)]
#[serde(rename_all = "snake_case", deny_unknown_fields)]
#[strum(serialize_all = "snake_case")]
pub enum ChangeKind {
    /// A file was read
    Access,

    /// A file's or directory's attributes were changed
    Attribute,

    /// A file open for writing was closed
    CloseWrite,

    /// A file not open for writing was closed
    CloseNoWrite,

    /// A file, directory, or something else was created within a watched directory
    Create,

    /// A file, directory, or something else was deleted
    Delete,

    /// A file's content was modified
    Modify,

    /// A file was opened
    Open,

    /// A file, directory, or something else was renamed in some way
    Rename,

    /// Catch-all for any other change
    Unknown,
}
|
||||||
|
|
||||||
|
impl ChangeKind {
|
||||||
|
/// Returns a list of all variants as str names
|
||||||
|
pub const fn variants() -> &'static [&'static str] {
|
||||||
|
Self::VARIANTS
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a list of all variants as a vec
|
||||||
|
pub fn all() -> Vec<ChangeKind> {
|
||||||
|
ChangeKindSet::all().into_sorted_vec()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns true if kind is part of the access family.
|
||||||
|
pub fn is_access(&self) -> bool {
|
||||||
|
matches!(
|
||||||
|
self,
|
||||||
|
Self::Access | Self::CloseWrite | Self::CloseNoWrite | Self::Open
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns true if kind is part of the create family.
|
||||||
|
pub fn is_create(&self) -> bool {
|
||||||
|
matches!(self, Self::Create)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns true if kind is part of the delete family.
|
||||||
|
pub fn is_delete(&self) -> bool {
|
||||||
|
matches!(self, Self::Delete)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns true if kind is part of the modify family.
|
||||||
|
pub fn is_modify(&self) -> bool {
|
||||||
|
matches!(self, Self::Attribute | Self::Modify)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns true if kind is part of the rename family.
|
||||||
|
pub fn is_rename(&self) -> bool {
|
||||||
|
matches!(self, Self::Rename)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns true if kind is unknown.
|
||||||
|
pub fn is_unknown(&self) -> bool {
|
||||||
|
matches!(self, Self::Unknown)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl BitOr for ChangeKind {
|
||||||
|
type Output = ChangeKindSet;
|
||||||
|
|
||||||
|
fn bitor(self, rhs: Self) -> Self::Output {
|
||||||
|
let mut set = ChangeKindSet::empty();
|
||||||
|
set.insert(self);
|
||||||
|
set.insert(rhs);
|
||||||
|
set
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Represents a distinct set of different change kinds
///
/// Equality and hashing are defined by membership only (see the manual
/// `PartialEq`/`Hash` impls), not by the underlying `HashSet` internals.
#[derive(Clone, Debug, Deref, DerefMut, IntoIterator, Serialize, Deserialize)]
pub struct ChangeKindSet(HashSet<ChangeKind>);
|
||||||
|
|
||||||
|
impl ChangeKindSet {
|
||||||
|
pub fn new(set: impl IntoIterator<Item = ChangeKind>) -> Self {
|
||||||
|
set.into_iter().collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Produces an empty set of [`ChangeKind`]
|
||||||
|
pub fn empty() -> Self {
|
||||||
|
Self(HashSet::new())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Produces a set of all [`ChangeKind`]
|
||||||
|
pub fn all() -> Self {
|
||||||
|
vec![
|
||||||
|
ChangeKind::Access,
|
||||||
|
ChangeKind::Attribute,
|
||||||
|
ChangeKind::CloseWrite,
|
||||||
|
ChangeKind::CloseNoWrite,
|
||||||
|
ChangeKind::Create,
|
||||||
|
ChangeKind::Delete,
|
||||||
|
ChangeKind::Modify,
|
||||||
|
ChangeKind::Open,
|
||||||
|
ChangeKind::Rename,
|
||||||
|
ChangeKind::Unknown,
|
||||||
|
]
|
||||||
|
.into_iter()
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Consumes set and returns a sorted vec of the kinds of changes
|
||||||
|
pub fn into_sorted_vec(self) -> Vec<ChangeKind> {
|
||||||
|
let mut v = self.0.into_iter().collect::<Vec<_>>();
|
||||||
|
v.sort();
|
||||||
|
v
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl fmt::Display for ChangeKindSet {
|
||||||
|
/// Outputs a comma-separated series of [`ChangeKind`] as string that are sorted
|
||||||
|
/// such that this will always be consistent output
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||||
|
let mut kinds = self
|
||||||
|
.0
|
||||||
|
.iter()
|
||||||
|
.map(ToString::to_string)
|
||||||
|
.collect::<Vec<String>>();
|
||||||
|
kinds.sort_unstable();
|
||||||
|
write!(f, "{}", kinds.join(","))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PartialEq for ChangeKindSet {
|
||||||
|
fn eq(&self, other: &Self) -> bool {
|
||||||
|
self.to_string() == other.to_string()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Eq for ChangeKindSet {}
|
||||||
|
|
||||||
|
impl Hash for ChangeKindSet {
    /// Hashes based on the output of [`fmt::Display`]
    ///
    /// `Display` sorts the kind names first, so two sets with the same
    /// members always hash identically regardless of insertion order —
    /// keeping `Hash` consistent with equality.
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.to_string().hash(state);
    }
}
|
||||||
|
|
||||||
|
impl BitOr<ChangeKindSet> for ChangeKindSet {
|
||||||
|
type Output = Self;
|
||||||
|
|
||||||
|
fn bitor(mut self, rhs: ChangeKindSet) -> Self::Output {
|
||||||
|
self.extend(rhs.0);
|
||||||
|
self
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl BitOr<ChangeKind> for ChangeKindSet {
|
||||||
|
type Output = Self;
|
||||||
|
|
||||||
|
fn bitor(mut self, rhs: ChangeKind) -> Self::Output {
|
||||||
|
self.0.insert(rhs);
|
||||||
|
self
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl BitOr<ChangeKindSet> for ChangeKind {
|
||||||
|
type Output = ChangeKindSet;
|
||||||
|
|
||||||
|
fn bitor(self, rhs: ChangeKindSet) -> Self::Output {
|
||||||
|
rhs | self
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Sub<ChangeKindSet> for ChangeKindSet {
|
||||||
|
type Output = Self;
|
||||||
|
|
||||||
|
fn sub(self, other: Self) -> Self::Output {
|
||||||
|
ChangeKindSet(&self.0 - &other.0)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Sub<&'_ ChangeKindSet> for &ChangeKindSet {
|
||||||
|
type Output = ChangeKindSet;
|
||||||
|
|
||||||
|
fn sub(self, other: &ChangeKindSet) -> Self::Output {
|
||||||
|
ChangeKindSet(&self.0 - &other.0)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl FromStr for ChangeKindSet {
|
||||||
|
type Err = strum::ParseError;
|
||||||
|
|
||||||
|
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||||
|
let mut change_set = HashSet::new();
|
||||||
|
|
||||||
|
for word in s.split(',') {
|
||||||
|
change_set.insert(ChangeKind::from_str(word.trim())?);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(ChangeKindSet(change_set))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl FromIterator<ChangeKind> for ChangeKindSet {
|
||||||
|
fn from_iter<I: IntoIterator<Item = ChangeKind>>(iter: I) -> Self {
|
||||||
|
let mut change_set = HashSet::new();
|
||||||
|
|
||||||
|
for i in iter {
|
||||||
|
change_set.insert(i);
|
||||||
|
}
|
||||||
|
|
||||||
|
ChangeKindSet(change_set)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<ChangeKind> for ChangeKindSet {
    /// Produces a single-element set containing the given kind.
    fn from(change_kind: ChangeKind) -> Self {
        Self::new([change_kind])
    }
}
|
||||||
|
|
||||||
|
impl From<Vec<ChangeKind>> for ChangeKindSet {
    /// Builds a set from a vec of kinds, collapsing duplicates.
    fn from(changes: Vec<ChangeKind>) -> Self {
        Self::new(changes)
    }
}
|
||||||
|
|
||||||
|
impl Default for ChangeKindSet {
    /// Defaults to an empty set of change kinds.
    fn default() -> Self {
        Self::empty()
    }
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;

    mod change_kind_set {
        use super::*;

        #[test]
        fn should_be_able_to_serialize_to_json() {
            let kind_set = ChangeKindSet::new([ChangeKind::CloseWrite]);
            let serialized = serde_json::to_value(kind_set).unwrap();

            assert_eq!(serialized, serde_json::json!(["close_write"]));
        }

        #[test]
        fn should_be_able_to_deserialize_from_json() {
            let json = serde_json::json!(["close_write"]);
            let kind_set: ChangeKindSet = serde_json::from_value(json).unwrap();

            assert_eq!(kind_set, ChangeKindSet::new([ChangeKind::CloseWrite]));
        }

        #[test]
        fn should_be_able_to_serialize_to_msgpack() {
            // Only verifies that serialization succeeds; the exact byte layout is an
            // implementation detail of the encoder and may change over time. Serde has
            // been known to fail at runtime, so succeeding at all is the check here.
            let kind_set = ChangeKindSet::new([ChangeKind::CloseWrite]);
            rmp_serde::encode::to_vec_named(&kind_set).unwrap();
        }

        #[test]
        fn should_be_able_to_deserialize_from_msgpack() {
            // Round-trips through msgpack: serializing on one side and deserializing on
            // the other has broken before with minor type changes, so we verify both
            // directions agree.
            let bytes =
                rmp_serde::encode::to_vec_named(&ChangeKindSet::new([ChangeKind::CloseWrite]))
                    .unwrap();

            let kind_set: ChangeKindSet = rmp_serde::decode::from_slice(&bytes).unwrap();
            assert_eq!(kind_set, ChangeKindSet::new([ChangeKind::CloseWrite]));
        }
    }

    mod change_kind {
        use super::*;

        #[test]
        fn should_be_able_to_serialize_to_json() {
            let serialized = serde_json::to_value(ChangeKind::CloseWrite).unwrap();

            assert_eq!(serialized, serde_json::json!("close_write"));
        }

        #[test]
        fn should_be_able_to_deserialize_from_json() {
            let json = serde_json::json!("close_write");
            let deserialized: ChangeKind = serde_json::from_value(json).unwrap();

            assert_eq!(deserialized, ChangeKind::CloseWrite);
        }

        #[test]
        fn should_be_able_to_serialize_to_msgpack() {
            // Only verifies that serialization succeeds; the exact byte layout is an
            // implementation detail of the encoder and may change over time.
            rmp_serde::encode::to_vec_named(&ChangeKind::CloseWrite).unwrap();
        }

        #[test]
        fn should_be_able_to_deserialize_from_msgpack() {
            // Round-trips through msgpack to catch encode/decode mismatches between
            // client and server, which have happened with minor type changes before.
            let bytes = rmp_serde::encode::to_vec_named(&ChangeKind::CloseWrite).unwrap();

            let deserialized: ChangeKind = rmp_serde::decode::from_slice(&bytes).unwrap();
            assert_eq!(deserialized, ChangeKind::CloseWrite);
        }
    }
}
|
@ -0,0 +1,89 @@
|
|||||||
|
use std::ops::{Deref, DerefMut};
|
||||||
|
|
||||||
|
use derive_more::{Display, From, Into};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
/// Represents some command with arguments to execute
|
||||||
|
#[derive(Clone, Debug, Display, From, Into, Hash, PartialEq, Eq, Serialize, Deserialize)]
|
||||||
|
pub struct Cmd(String);
|
||||||
|
|
||||||
|
impl Cmd {
|
||||||
|
/// Creates a new command from the given `cmd`
|
||||||
|
pub fn new(cmd: impl Into<String>) -> Self {
|
||||||
|
Self(cmd.into())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns reference to the program portion of the command
|
||||||
|
pub fn program(&self) -> &str {
|
||||||
|
match self.0.split_once(' ') {
|
||||||
|
Some((program, _)) => program.trim(),
|
||||||
|
None => self.0.trim(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns reference to the arguments portion of the command
|
||||||
|
pub fn arguments(&self) -> &str {
|
||||||
|
match self.0.split_once(' ') {
|
||||||
|
Some((_, arguments)) => arguments.trim(),
|
||||||
|
None => "",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Deref for Cmd {
|
||||||
|
type Target = String;
|
||||||
|
|
||||||
|
fn deref(&self) -> &Self::Target {
|
||||||
|
&self.0
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl DerefMut for Cmd {
|
||||||
|
fn deref_mut(&mut self) -> &mut Self::Target {
|
||||||
|
&mut self.0
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn should_be_able_to_serialize_to_json() {
        let serialized = serde_json::to_value(Cmd::new("echo some text")).unwrap();

        assert_eq!(serialized, serde_json::json!("echo some text"));
    }

    #[test]
    fn should_be_able_to_deserialize_from_json() {
        let json = serde_json::json!("echo some text");
        let deserialized: Cmd = serde_json::from_value(json).unwrap();

        assert_eq!(deserialized, Cmd::new("echo some text"));
    }

    #[test]
    fn should_be_able_to_serialize_to_msgpack() {
        // Only verifies that serialization succeeds; the exact byte layout is an
        // implementation detail of the encoder and may change over time.
        rmp_serde::encode::to_vec_named(&Cmd::new("echo some text")).unwrap();
    }

    #[test]
    fn should_be_able_to_deserialize_from_msgpack() {
        // Round-trips through msgpack to catch encode/decode mismatches between
        // client and server, which have happened with minor type changes before.
        let bytes = rmp_serde::encode::to_vec_named(&Cmd::new("echo some text")).unwrap();

        let deserialized: Cmd = rmp_serde::decode::from_slice(&bytes).unwrap();
        assert_eq!(deserialized, Cmd::new("echo some text"));
    }
}
|
@ -0,0 +1,173 @@
|
|||||||
|
use std::fs::FileType as StdFileType;
|
||||||
|
use std::path::PathBuf;
|
||||||
|
|
||||||
|
use derive_more::IsVariant;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use strum::AsRefStr;
|
||||||
|
|
||||||
|
/// Represents information about a single entry within a directory
|
||||||
|
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
|
||||||
|
#[serde(rename_all = "snake_case", deny_unknown_fields)]
|
||||||
|
pub struct DirEntry {
|
||||||
|
/// Represents the full path to the entry
|
||||||
|
pub path: PathBuf,
|
||||||
|
|
||||||
|
/// Represents the type of the entry as a file/dir/symlink
|
||||||
|
pub file_type: FileType,
|
||||||
|
|
||||||
|
/// Depth at which this entry was created relative to the root (0 being immediately within
|
||||||
|
/// root)
|
||||||
|
pub depth: usize,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Represents the type associated with a dir entry
|
||||||
|
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, AsRefStr, IsVariant, Serialize, Deserialize)]
|
||||||
|
#[serde(rename_all = "snake_case", deny_unknown_fields)]
|
||||||
|
#[strum(serialize_all = "snake_case")]
|
||||||
|
pub enum FileType {
|
||||||
|
Dir,
|
||||||
|
File,
|
||||||
|
Symlink,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<StdFileType> for FileType {
|
||||||
|
fn from(ft: StdFileType) -> Self {
|
||||||
|
if ft.is_dir() {
|
||||||
|
Self::Dir
|
||||||
|
} else if ft.is_symlink() {
|
||||||
|
Self::Symlink
|
||||||
|
} else {
|
||||||
|
Self::File
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;

    mod dir_entry {
        use super::*;

        #[test]
        fn should_be_able_to_serialize_to_json() {
            let entry = DirEntry {
                path: PathBuf::from("dir").join("file"),
                file_type: FileType::File,
                depth: 1,
            };

            // Capture the platform-specific rendering of the path before the
            // entry is consumed by serialization
            let expected_path = entry.path.to_str().unwrap().to_string();

            let serialized = serde_json::to_value(entry).unwrap();
            assert_eq!(
                serialized,
                serde_json::json!({
                    "path": expected_path,
                    "file_type": "file",
                    "depth": 1,
                })
            );
        }

        #[test]
        fn should_be_able_to_deserialize_from_json() {
            let json = serde_json::json!({
                "path": "test-file",
                "file_type": "file",
                "depth": 0,
            });

            let entry: DirEntry = serde_json::from_value(json).unwrap();
            assert_eq!(
                entry,
                DirEntry {
                    path: PathBuf::from("test-file"),
                    file_type: FileType::File,
                    depth: 0,
                }
            );
        }

        #[test]
        fn should_be_able_to_serialize_to_msgpack() {
            // Only verifies that serialization succeeds; the exact byte layout is an
            // implementation detail of the encoder and may change over time.
            let entry = DirEntry {
                path: PathBuf::from("dir").join("file"),
                file_type: FileType::File,
                depth: 1,
            };

            rmp_serde::encode::to_vec_named(&entry).unwrap();
        }

        #[test]
        fn should_be_able_to_deserialize_from_msgpack() {
            // Round-trips through msgpack to catch encode/decode mismatches between
            // client and server, which have happened with minor type changes before.
            let original = DirEntry {
                path: PathBuf::from("test-file"),
                file_type: FileType::File,
                depth: 0,
            };
            let bytes = rmp_serde::encode::to_vec_named(&original).unwrap();

            let entry: DirEntry = rmp_serde::decode::from_slice(&bytes).unwrap();
            assert_eq!(
                entry,
                DirEntry {
                    path: PathBuf::from("test-file"),
                    file_type: FileType::File,
                    depth: 0,
                }
            );
        }
    }

    mod file_type {
        use super::*;

        #[test]
        fn should_be_able_to_serialize_to_json() {
            let serialized = serde_json::to_value(FileType::File).unwrap();

            assert_eq!(serialized, serde_json::json!("file"));
        }

        #[test]
        fn should_be_able_to_deserialize_from_json() {
            let json = serde_json::json!("file");
            let deserialized: FileType = serde_json::from_value(json).unwrap();

            assert_eq!(deserialized, FileType::File);
        }

        #[test]
        fn should_be_able_to_serialize_to_msgpack() {
            // Only verifies that serialization succeeds; the exact byte layout is an
            // implementation detail of the encoder and may change over time.
            rmp_serde::encode::to_vec_named(&FileType::File).unwrap();
        }

        #[test]
        fn should_be_able_to_deserialize_from_msgpack() {
            // Round-trips through msgpack to catch encode/decode mismatches between
            // client and server, which have happened with minor type changes before.
            let bytes = rmp_serde::encode::to_vec_named(&FileType::File).unwrap();

            let deserialized: FileType = rmp_serde::decode::from_slice(&bytes).unwrap();
            assert_eq!(deserialized, FileType::File);
        }
    }
}
|
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,658 @@
|
|||||||
|
use bitflags::bitflags;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
use crate::utils;
|
||||||
|
|
||||||
|
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)]
|
||||||
|
#[serde(default, deny_unknown_fields, rename_all = "snake_case")]
|
||||||
|
pub struct SetPermissionsOptions {
|
||||||
|
/// Whether or not to exclude symlinks from traversal entirely, meaning that permissions will
|
||||||
|
/// not be set on symlinks (usually resolving the symlink and setting the permission of the
|
||||||
|
/// referenced file or directory) that are explicitly provided or show up during recursion.
|
||||||
|
#[serde(skip_serializing_if = "utils::is_false")]
|
||||||
|
pub exclude_symlinks: bool,
|
||||||
|
|
||||||
|
/// Whether or not to traverse symlinks when recursively setting permissions. Note that this
|
||||||
|
/// does NOT influence setting permissions when encountering a symlink as most platforms will
|
||||||
|
/// resolve the symlink before setting permissions.
|
||||||
|
#[serde(skip_serializing_if = "utils::is_false")]
|
||||||
|
pub follow_symlinks: bool,
|
||||||
|
|
||||||
|
/// Whether or not to set the permissions of the file hierarchies rooted in the paths, instead
|
||||||
|
/// of just the paths themselves.
|
||||||
|
#[serde(skip_serializing_if = "utils::is_false")]
|
||||||
|
pub recursive: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Represents permissions to apply to some path on a remote machine
|
||||||
|
///
|
||||||
|
/// When used to set permissions on a file, directory, or symlink,
|
||||||
|
/// only fields that are set (not `None`) will be applied.
|
||||||
|
///
|
||||||
|
/// On `Unix` platforms, this translates directly into the mode that
|
||||||
|
/// you would find with `chmod`. On all other platforms, this uses the
|
||||||
|
/// write flags to determine whether or not to set the readonly status.
|
||||||
|
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)]
|
||||||
|
pub struct Permissions {
|
||||||
|
/// Represents whether or not owner can read from the file
|
||||||
|
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||||
|
pub owner_read: Option<bool>,
|
||||||
|
|
||||||
|
/// Represents whether or not owner can write to the file
|
||||||
|
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||||
|
pub owner_write: Option<bool>,
|
||||||
|
|
||||||
|
/// Represents whether or not owner can execute the file
|
||||||
|
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||||
|
pub owner_exec: Option<bool>,
|
||||||
|
|
||||||
|
/// Represents whether or not associated group can read from the file
|
||||||
|
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||||
|
pub group_read: Option<bool>,
|
||||||
|
|
||||||
|
/// Represents whether or not associated group can write to the file
|
||||||
|
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||||
|
pub group_write: Option<bool>,
|
||||||
|
|
||||||
|
/// Represents whether or not associated group can execute the file
|
||||||
|
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||||
|
pub group_exec: Option<bool>,
|
||||||
|
|
||||||
|
/// Represents whether or not other can read from the file
|
||||||
|
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||||
|
pub other_read: Option<bool>,
|
||||||
|
|
||||||
|
/// Represents whether or not other can write to the file
|
||||||
|
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||||
|
pub other_write: Option<bool>,
|
||||||
|
|
||||||
|
/// Represents whether or not other can execute the file
|
||||||
|
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||||
|
pub other_exec: Option<bool>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Permissions {
|
||||||
|
/// Creates a set of [`Permissions`] that indicate readonly status.
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use distant_protocol::Permissions;
|
||||||
|
///
|
||||||
|
/// let permissions = Permissions::readonly();
|
||||||
|
/// assert_eq!(permissions.is_readonly(), Some(true));
|
||||||
|
/// assert_eq!(permissions.is_writable(), Some(false));
|
||||||
|
/// ```
|
||||||
|
pub fn readonly() -> Self {
|
||||||
|
Self {
|
||||||
|
owner_write: Some(false),
|
||||||
|
group_write: Some(false),
|
||||||
|
other_write: Some(false),
|
||||||
|
|
||||||
|
owner_read: Some(true),
|
||||||
|
group_read: Some(true),
|
||||||
|
other_read: Some(true),
|
||||||
|
|
||||||
|
owner_exec: None,
|
||||||
|
group_exec: None,
|
||||||
|
other_exec: None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/// Creates a set of [`Permissions`] that indicate globally writable status.
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use distant_protocol::Permissions;
|
||||||
|
///
|
||||||
|
/// let permissions = Permissions::writable();
|
||||||
|
/// assert_eq!(permissions.is_readonly(), Some(false));
|
||||||
|
/// assert_eq!(permissions.is_writable(), Some(true));
|
||||||
|
/// ```
|
||||||
|
pub fn writable() -> Self {
|
||||||
|
Self {
|
||||||
|
owner_write: Some(true),
|
||||||
|
group_write: Some(true),
|
||||||
|
other_write: Some(true),
|
||||||
|
|
||||||
|
owner_read: Some(true),
|
||||||
|
group_read: Some(true),
|
||||||
|
other_read: Some(true),
|
||||||
|
|
||||||
|
owner_exec: None,
|
||||||
|
group_exec: None,
|
||||||
|
other_exec: None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns true if the permission set has a value specified for each permission (no `None`
|
||||||
|
/// settings).
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use distant_protocol::Permissions;
|
||||||
|
///
|
||||||
|
/// let permissions = Permissions {
|
||||||
|
/// owner_write: Some(true),
|
||||||
|
/// group_write: Some(false),
|
||||||
|
/// other_write: Some(true),
|
||||||
|
/// owner_read: Some(false),
|
||||||
|
/// group_read: Some(true),
|
||||||
|
/// other_read: Some(false),
|
||||||
|
/// owner_exec: Some(true),
|
||||||
|
/// group_exec: Some(false),
|
||||||
|
/// other_exec: Some(true),
|
||||||
|
/// };
|
||||||
|
/// assert!(permissions.is_complete());
|
||||||
|
/// ```
|
||||||
|
pub fn is_complete(&self) -> bool {
|
||||||
|
self.owner_read.is_some()
|
||||||
|
&& self.owner_write.is_some()
|
||||||
|
&& self.owner_exec.is_some()
|
||||||
|
&& self.group_read.is_some()
|
||||||
|
&& self.group_write.is_some()
|
||||||
|
&& self.group_exec.is_some()
|
||||||
|
&& self.other_read.is_some()
|
||||||
|
&& self.other_write.is_some()
|
||||||
|
&& self.other_exec.is_some()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns `true` if permissions represent readonly, `false` if permissions represent
|
||||||
|
/// writable, and `None` if no permissions have been set to indicate either status.
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use distant_protocol::Permissions;
|
||||||
|
///
|
||||||
|
/// assert_eq!(
|
||||||
|
/// Permissions { owner_write: Some(true), ..Default::default() }.is_readonly(),
|
||||||
|
/// Some(false)
|
||||||
|
/// );
|
||||||
|
///
|
||||||
|
/// assert_eq!(
|
||||||
|
/// Permissions { owner_write: Some(false), ..Default::default() }.is_readonly(),
|
||||||
|
/// Some(true)
|
||||||
|
/// );
|
||||||
|
///
|
||||||
|
/// assert_eq!(
|
||||||
|
/// Permissions { ..Default::default() }.is_writable(),
|
||||||
|
/// None
|
||||||
|
/// );
|
||||||
|
/// ```
|
||||||
|
#[inline]
|
||||||
|
pub fn is_readonly(&self) -> Option<bool> {
|
||||||
|
// Negate the writable status to indicate whether or not readonly
|
||||||
|
self.is_writable().map(|x| !x)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns `true` if permissions represent ability to write, `false` if permissions represent
|
||||||
|
/// inability to write, and `None` if no permissions have been set to indicate either status.
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use distant_protocol::Permissions;
|
||||||
|
///
|
||||||
|
/// assert_eq!(
|
||||||
|
/// Permissions { owner_write: Some(true), ..Default::default() }.is_writable(),
|
||||||
|
/// Some(true)
|
||||||
|
/// );
|
||||||
|
///
|
||||||
|
/// assert_eq!(
|
||||||
|
/// Permissions { owner_write: Some(false), ..Default::default() }.is_writable(),
|
||||||
|
/// Some(false)
|
||||||
|
/// );
|
||||||
|
///
|
||||||
|
/// assert_eq!(
|
||||||
|
/// Permissions { ..Default::default() }.is_writable(),
|
||||||
|
/// None
|
||||||
|
/// );
|
||||||
|
/// ```
|
||||||
|
#[inline]
|
||||||
|
pub fn is_writable(&self) -> Option<bool> {
|
||||||
|
match (self.owner_write, self.group_write, self.other_write) {
|
||||||
|
(None, None, None) => None,
|
||||||
|
(owner, group, other) => {
|
||||||
|
Some(owner.unwrap_or(false) || group.unwrap_or(false) || other.unwrap_or(false))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Applies `other` settings to `self`, overwriting any of the permissions in `self` with `other`.
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use distant_protocol::Permissions;
|
||||||
|
///
|
||||||
|
/// let mut a = Permissions {
|
||||||
|
/// owner_read: Some(true),
|
||||||
|
/// owner_write: Some(false),
|
||||||
|
/// owner_exec: None,
|
||||||
|
/// ..Default::default()
|
||||||
|
/// };
|
||||||
|
///
|
||||||
|
/// let b = Permissions {
|
||||||
|
/// owner_read: Some(false),
|
||||||
|
/// owner_write: None,
|
||||||
|
/// owner_exec: Some(true),
|
||||||
|
/// ..Default::default()
|
||||||
|
/// };
|
||||||
|
///
|
||||||
|
/// a.apply_from(&b);
|
||||||
|
///
|
||||||
|
/// assert_eq!(a, Permissions {
|
||||||
|
/// owner_read: Some(false),
|
||||||
|
/// owner_write: Some(false),
|
||||||
|
/// owner_exec: Some(true),
|
||||||
|
/// ..Default::default()
|
||||||
|
/// });
|
||||||
|
/// ```
|
||||||
|
#[inline]
|
||||||
|
pub fn apply_from(&mut self, other: &Self) {
|
||||||
|
macro_rules! apply {
|
||||||
|
($key:ident) => {{
|
||||||
|
if let Some(value) = other.$key {
|
||||||
|
self.$key = Some(value);
|
||||||
|
}
|
||||||
|
}};
|
||||||
|
}
|
||||||
|
|
||||||
|
apply!(owner_read);
|
||||||
|
apply!(owner_write);
|
||||||
|
apply!(owner_exec);
|
||||||
|
apply!(group_read);
|
||||||
|
apply!(group_write);
|
||||||
|
apply!(group_exec);
|
||||||
|
apply!(other_read);
|
||||||
|
apply!(other_write);
|
||||||
|
apply!(other_exec);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Applies `self` settings to `other`, overwriting any of the permissions in `other` with
|
||||||
|
/// `self`.
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use distant_protocol::Permissions;
|
||||||
|
///
|
||||||
|
/// let a = Permissions {
|
||||||
|
/// owner_read: Some(true),
|
||||||
|
/// owner_write: Some(false),
|
||||||
|
/// owner_exec: None,
|
||||||
|
/// ..Default::default()
|
||||||
|
/// };
|
||||||
|
///
|
||||||
|
/// let mut b = Permissions {
|
||||||
|
/// owner_read: Some(false),
|
||||||
|
/// owner_write: None,
|
||||||
|
/// owner_exec: Some(true),
|
||||||
|
/// ..Default::default()
|
||||||
|
/// };
|
||||||
|
///
|
||||||
|
/// a.apply_to(&mut b);
|
||||||
|
///
|
||||||
|
/// assert_eq!(b, Permissions {
|
||||||
|
/// owner_read: Some(true),
|
||||||
|
/// owner_write: Some(false),
|
||||||
|
/// owner_exec: Some(true),
|
||||||
|
/// ..Default::default()
|
||||||
|
/// });
|
||||||
|
/// ```
|
||||||
|
#[inline]
|
||||||
|
pub fn apply_to(&self, other: &mut Self) {
|
||||||
|
Self::apply_from(other, self)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Converts a Unix `mode` into the permission set.
|
||||||
|
pub fn from_unix_mode(mode: u32) -> Self {
|
||||||
|
let flags = UnixFilePermissionFlags::from_bits_truncate(mode);
|
||||||
|
Self {
|
||||||
|
owner_read: Some(flags.contains(UnixFilePermissionFlags::OWNER_READ)),
|
||||||
|
owner_write: Some(flags.contains(UnixFilePermissionFlags::OWNER_WRITE)),
|
||||||
|
owner_exec: Some(flags.contains(UnixFilePermissionFlags::OWNER_EXEC)),
|
||||||
|
group_read: Some(flags.contains(UnixFilePermissionFlags::GROUP_READ)),
|
||||||
|
group_write: Some(flags.contains(UnixFilePermissionFlags::GROUP_WRITE)),
|
||||||
|
group_exec: Some(flags.contains(UnixFilePermissionFlags::GROUP_EXEC)),
|
||||||
|
other_read: Some(flags.contains(UnixFilePermissionFlags::OTHER_READ)),
|
||||||
|
other_write: Some(flags.contains(UnixFilePermissionFlags::OTHER_WRITE)),
|
||||||
|
other_exec: Some(flags.contains(UnixFilePermissionFlags::OTHER_EXEC)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Converts to a Unix `mode` from a permission set. For any missing setting, a 0 bit is used.
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use distant_protocol::Permissions;
|
||||||
|
///
|
||||||
|
/// assert_eq!(Permissions {
|
||||||
|
/// owner_read: Some(true),
|
||||||
|
/// owner_write: Some(true),
|
||||||
|
/// owner_exec: Some(true),
|
||||||
|
/// group_read: Some(true),
|
||||||
|
/// group_write: Some(true),
|
||||||
|
/// group_exec: Some(true),
|
||||||
|
/// other_read: Some(true),
|
||||||
|
/// other_write: Some(true),
|
||||||
|
/// other_exec: Some(true),
|
||||||
|
/// }.to_unix_mode(), 0o777);
|
||||||
|
///
|
||||||
|
/// assert_eq!(Permissions {
|
||||||
|
/// owner_read: Some(true),
|
||||||
|
/// owner_write: Some(false),
|
||||||
|
/// owner_exec: Some(false),
|
||||||
|
/// group_read: Some(true),
|
||||||
|
/// group_write: Some(false),
|
||||||
|
/// group_exec: Some(false),
|
||||||
|
/// other_read: Some(true),
|
||||||
|
/// other_write: Some(false),
|
||||||
|
/// other_exec: Some(false),
|
||||||
|
/// }.to_unix_mode(), 0o444);
|
||||||
|
///
|
||||||
|
/// assert_eq!(Permissions {
|
||||||
|
/// owner_exec: Some(true),
|
||||||
|
/// group_exec: Some(true),
|
||||||
|
/// other_exec: Some(true),
|
||||||
|
/// ..Default::default()
|
||||||
|
/// }.to_unix_mode(), 0o111);
|
||||||
|
/// ```
|
||||||
|
pub fn to_unix_mode(&self) -> u32 {
|
||||||
|
let mut flags = UnixFilePermissionFlags::empty();
|
||||||
|
|
||||||
|
macro_rules! is_true {
|
||||||
|
($opt:expr) => {{
|
||||||
|
$opt.is_some() && $opt.unwrap()
|
||||||
|
}};
|
||||||
|
}
|
||||||
|
|
||||||
|
if is_true!(self.owner_read) {
|
||||||
|
flags.insert(UnixFilePermissionFlags::OWNER_READ);
|
||||||
|
}
|
||||||
|
if is_true!(self.owner_write) {
|
||||||
|
flags.insert(UnixFilePermissionFlags::OWNER_WRITE);
|
||||||
|
}
|
||||||
|
if is_true!(self.owner_exec) {
|
||||||
|
flags.insert(UnixFilePermissionFlags::OWNER_EXEC);
|
||||||
|
}
|
||||||
|
|
||||||
|
if is_true!(self.group_read) {
|
||||||
|
flags.insert(UnixFilePermissionFlags::GROUP_READ);
|
||||||
|
}
|
||||||
|
if is_true!(self.group_write) {
|
||||||
|
flags.insert(UnixFilePermissionFlags::GROUP_WRITE);
|
||||||
|
}
|
||||||
|
if is_true!(self.group_exec) {
|
||||||
|
flags.insert(UnixFilePermissionFlags::GROUP_EXEC);
|
||||||
|
}
|
||||||
|
|
||||||
|
if is_true!(self.other_read) {
|
||||||
|
flags.insert(UnixFilePermissionFlags::OTHER_READ);
|
||||||
|
}
|
||||||
|
if is_true!(self.other_write) {
|
||||||
|
flags.insert(UnixFilePermissionFlags::OTHER_WRITE);
|
||||||
|
}
|
||||||
|
if is_true!(self.other_exec) {
|
||||||
|
flags.insert(UnixFilePermissionFlags::OTHER_EXEC);
|
||||||
|
}
|
||||||
|
|
||||||
|
flags.bits()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(unix)]
|
||||||
|
impl From<std::fs::Permissions> for Permissions {
|
||||||
|
/// Converts [`std::fs::Permissions`] into [`Permissions`] using
|
||||||
|
/// [`std::os::unix::fs::PermissionsExt::mode`] to supply the bitset.
|
||||||
|
fn from(permissions: std::fs::Permissions) -> Self {
|
||||||
|
use std::os::unix::prelude::*;
|
||||||
|
Self::from_unix_mode(permissions.mode())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(not(unix))]
impl From<std::fs::Permissions> for Permissions {
    /// Converts [`std::fs::Permissions`] into [`Permissions`] using the `readonly` flag.
    ///
    /// This will not set executable flags, but will set all read and write flags with write flags
    /// being `false` if `readonly`, otherwise set to `true`.
    fn from(permissions: std::fs::Permissions) -> Self {
        match permissions.readonly() {
            true => Self::readonly(),
            false => Self::writable(),
        }
    }
}
|
||||||
|
|
||||||
|
#[cfg(unix)]
|
||||||
|
impl From<Permissions> for std::fs::Permissions {
|
||||||
|
/// Converts [`Permissions`] into [`std::fs::Permissions`] using
|
||||||
|
/// [`std::os::unix::fs::PermissionsExt::from_mode`].
|
||||||
|
fn from(permissions: Permissions) -> Self {
|
||||||
|
use std::os::unix::prelude::*;
|
||||||
|
std::fs::Permissions::from_mode(permissions.to_unix_mode())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
bitflags! {
    /// Bit positions matching the standard Unix octal permission mode
    /// (owner / group / other, each with read / write / exec bits).
    struct UnixFilePermissionFlags: u32 {
        // Owner (user) bits
        const OWNER_READ = 0o400;
        const OWNER_WRITE = 0o200;
        const OWNER_EXEC = 0o100;
        // Group bits
        const GROUP_READ = 0o40;
        const GROUP_WRITE = 0o20;
        const GROUP_EXEC = 0o10;
        // Other (world) bits
        const OTHER_READ = 0o4;
        const OTHER_WRITE = 0o2;
        const OTHER_EXEC = 0o1;
    }
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn should_be_able_to_serialize_minimal_permissions_to_json() {
|
||||||
|
let permissions = Permissions {
|
||||||
|
owner_read: None,
|
||||||
|
owner_write: None,
|
||||||
|
owner_exec: None,
|
||||||
|
group_read: None,
|
||||||
|
group_write: None,
|
||||||
|
group_exec: None,
|
||||||
|
other_read: None,
|
||||||
|
other_write: None,
|
||||||
|
other_exec: None,
|
||||||
|
};
|
||||||
|
|
||||||
|
let value = serde_json::to_value(permissions).unwrap();
|
||||||
|
assert_eq!(value, serde_json::json!({}));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn should_be_able_to_serialize_full_permissions_to_json() {
|
||||||
|
let permissions = Permissions {
|
||||||
|
owner_read: Some(true),
|
||||||
|
owner_write: Some(false),
|
||||||
|
owner_exec: Some(true),
|
||||||
|
group_read: Some(false),
|
||||||
|
group_write: Some(true),
|
||||||
|
group_exec: Some(false),
|
||||||
|
other_read: Some(true),
|
||||||
|
other_write: Some(false),
|
||||||
|
other_exec: Some(true),
|
||||||
|
};
|
||||||
|
|
||||||
|
let value = serde_json::to_value(permissions).unwrap();
|
||||||
|
assert_eq!(
|
||||||
|
value,
|
||||||
|
serde_json::json!({
|
||||||
|
"owner_read": true,
|
||||||
|
"owner_write": false,
|
||||||
|
"owner_exec": true,
|
||||||
|
"group_read": false,
|
||||||
|
"group_write": true,
|
||||||
|
"group_exec": false,
|
||||||
|
"other_read": true,
|
||||||
|
"other_write": false,
|
||||||
|
"other_exec": true,
|
||||||
|
})
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn should_be_able_to_deserialize_minimal_permissions_from_json() {
|
||||||
|
let value = serde_json::json!({});
|
||||||
|
|
||||||
|
let permissions: Permissions = serde_json::from_value(value).unwrap();
|
||||||
|
assert_eq!(
|
||||||
|
permissions,
|
||||||
|
Permissions {
|
||||||
|
owner_read: None,
|
||||||
|
owner_write: None,
|
||||||
|
owner_exec: None,
|
||||||
|
group_read: None,
|
||||||
|
group_write: None,
|
||||||
|
group_exec: None,
|
||||||
|
other_read: None,
|
||||||
|
other_write: None,
|
||||||
|
other_exec: None,
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn should_be_able_to_deserialize_full_permissions_from_json() {
|
||||||
|
let value = serde_json::json!({
|
||||||
|
"owner_read": true,
|
||||||
|
"owner_write": false,
|
||||||
|
"owner_exec": true,
|
||||||
|
"group_read": false,
|
||||||
|
"group_write": true,
|
||||||
|
"group_exec": false,
|
||||||
|
"other_read": true,
|
||||||
|
"other_write": false,
|
||||||
|
"other_exec": true,
|
||||||
|
});
|
||||||
|
|
||||||
|
let permissions: Permissions = serde_json::from_value(value).unwrap();
|
||||||
|
assert_eq!(
|
||||||
|
permissions,
|
||||||
|
Permissions {
|
||||||
|
owner_read: Some(true),
|
||||||
|
owner_write: Some(false),
|
||||||
|
owner_exec: Some(true),
|
||||||
|
group_read: Some(false),
|
||||||
|
group_write: Some(true),
|
||||||
|
group_exec: Some(false),
|
||||||
|
other_read: Some(true),
|
||||||
|
other_write: Some(false),
|
||||||
|
other_exec: Some(true),
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn should_be_able_to_serialize_minimal_permissions_to_msgpack() {
|
||||||
|
let permissions = Permissions {
|
||||||
|
owner_read: None,
|
||||||
|
owner_write: None,
|
||||||
|
owner_exec: None,
|
||||||
|
group_read: None,
|
||||||
|
group_write: None,
|
||||||
|
group_exec: None,
|
||||||
|
other_read: None,
|
||||||
|
other_write: None,
|
||||||
|
other_exec: None,
|
||||||
|
};
|
||||||
|
|
||||||
|
// NOTE: We don't actually check the output here because it's an implementation detail
|
||||||
|
// and could change as we change how serialization is done. This is merely to verify
|
||||||
|
// that we can serialize since there are times when serde fails to serialize at
|
||||||
|
// runtime.
|
||||||
|
let _ = rmp_serde::encode::to_vec_named(&permissions).unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn should_be_able_to_serialize_full_permissions_to_msgpack() {
|
||||||
|
let permissions = Permissions {
|
||||||
|
owner_read: Some(true),
|
||||||
|
owner_write: Some(false),
|
||||||
|
owner_exec: Some(true),
|
||||||
|
group_read: Some(true),
|
||||||
|
group_write: Some(false),
|
||||||
|
group_exec: Some(true),
|
||||||
|
other_read: Some(true),
|
||||||
|
other_write: Some(false),
|
||||||
|
other_exec: Some(true),
|
||||||
|
};
|
||||||
|
|
||||||
|
// NOTE: We don't actually check the output here because it's an implementation detail
|
||||||
|
// and could change as we change how serialization is done. This is merely to verify
|
||||||
|
// that we can serialize since there are times when serde fails to serialize at
|
||||||
|
// runtime.
|
||||||
|
let _ = rmp_serde::encode::to_vec_named(&permissions).unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn should_be_able_to_deserialize_minimal_permissions_from_msgpack() {
|
||||||
|
// NOTE: It may seem odd that we are serializing just to deserialize, but this is to
|
||||||
|
// verify that we are not corrupting or preventing issues when serializing on a
|
||||||
|
// client/server and then trying to deserialize on the other side. This has happened
|
||||||
|
// enough times with minor changes that we need tests to verify.
|
||||||
|
let buf = rmp_serde::encode::to_vec_named(&Permissions {
|
||||||
|
owner_read: None,
|
||||||
|
owner_write: None,
|
||||||
|
owner_exec: None,
|
||||||
|
group_read: None,
|
||||||
|
group_write: None,
|
||||||
|
group_exec: None,
|
||||||
|
other_read: None,
|
||||||
|
other_write: None,
|
||||||
|
other_exec: None,
|
||||||
|
})
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
let permissions: Permissions = rmp_serde::decode::from_slice(&buf).unwrap();
|
||||||
|
assert_eq!(
|
||||||
|
permissions,
|
||||||
|
Permissions {
|
||||||
|
owner_read: None,
|
||||||
|
owner_write: None,
|
||||||
|
owner_exec: None,
|
||||||
|
group_read: None,
|
||||||
|
group_write: None,
|
||||||
|
group_exec: None,
|
||||||
|
other_read: None,
|
||||||
|
other_write: None,
|
||||||
|
other_exec: None,
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn should_be_able_to_deserialize_full_permissions_from_msgpack() {
|
||||||
|
// NOTE: It may seem odd that we are serializing just to deserialize, but this is to
|
||||||
|
// verify that we are not corrupting or preventing issues when serializing on a
|
||||||
|
// client/server and then trying to deserialize on the other side. This has happened
|
||||||
|
// enough times with minor changes that we need tests to verify.
|
||||||
|
let buf = rmp_serde::encode::to_vec_named(&Permissions {
|
||||||
|
owner_read: Some(true),
|
||||||
|
owner_write: Some(false),
|
||||||
|
owner_exec: Some(true),
|
||||||
|
group_read: Some(true),
|
||||||
|
group_write: Some(false),
|
||||||
|
group_exec: Some(true),
|
||||||
|
other_read: Some(true),
|
||||||
|
other_write: Some(false),
|
||||||
|
other_exec: Some(true),
|
||||||
|
})
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
let permissions: Permissions = rmp_serde::decode::from_slice(&buf).unwrap();
|
||||||
|
assert_eq!(
|
||||||
|
permissions,
|
||||||
|
Permissions {
|
||||||
|
owner_read: Some(true),
|
||||||
|
owner_write: Some(false),
|
||||||
|
owner_exec: Some(true),
|
||||||
|
group_read: Some(true),
|
||||||
|
group_write: Some(false),
|
||||||
|
group_exec: Some(true),
|
||||||
|
other_read: Some(true),
|
||||||
|
other_write: Some(false),
|
||||||
|
other_exec: Some(true),
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
@ -0,0 +1,241 @@
|
|||||||
|
use std::fmt;
|
||||||
|
use std::num::ParseIntError;
|
||||||
|
use std::str::FromStr;
|
||||||
|
|
||||||
|
use derive_more::{Display, Error};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
/// Represents the size associated with a remote PTY
|
||||||
|
#[derive(Copy, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
|
||||||
|
pub struct PtySize {
|
||||||
|
/// Number of lines of text
|
||||||
|
pub rows: u16,
|
||||||
|
|
||||||
|
/// Number of columns of text
|
||||||
|
pub cols: u16,
|
||||||
|
|
||||||
|
/// Width of a cell in pixels. Note that some systems never fill this value and ignore it.
|
||||||
|
#[serde(default)]
|
||||||
|
pub pixel_width: u16,
|
||||||
|
|
||||||
|
/// Height of a cell in pixels. Note that some systems never fill this value and ignore it.
|
||||||
|
#[serde(default)]
|
||||||
|
pub pixel_height: u16,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PtySize {
|
||||||
|
/// Creates new size using just rows and columns
|
||||||
|
pub fn from_rows_and_cols(rows: u16, cols: u16) -> Self {
|
||||||
|
Self {
|
||||||
|
rows,
|
||||||
|
cols,
|
||||||
|
..Default::default()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl fmt::Display for PtySize {
|
||||||
|
/// Prints out `rows,cols[,pixel_width,pixel_height]` where the
|
||||||
|
/// pixel width and pixel height are only included if either
|
||||||
|
/// one of them is not zero
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
write!(f, "{},{}", self.rows, self.cols)?;
|
||||||
|
if self.pixel_width > 0 || self.pixel_height > 0 {
|
||||||
|
write!(f, ",{},{}", self.pixel_width, self.pixel_height)?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for PtySize {
|
||||||
|
fn default() -> Self {
|
||||||
|
PtySize {
|
||||||
|
rows: 24,
|
||||||
|
cols: 80,
|
||||||
|
pixel_width: 0,
|
||||||
|
pixel_height: 0,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Error produced when parsing a `PtySize` from a string fails.
#[derive(Clone, Debug, PartialEq, Eq, Display, Error)]
pub enum PtySizeParseError {
    /// The rows component was absent from the input
    MissingRows,
    /// The columns component was absent from the input
    MissingColumns,
    /// The rows component was present but not a valid `u16`
    InvalidRows(ParseIntError),
    /// The columns component was present but not a valid `u16`
    InvalidColumns(ParseIntError),
    /// The pixel width component was present but not a valid `u16`
    InvalidPixelWidth(ParseIntError),
    /// The pixel height component was present but not a valid `u16`
    InvalidPixelHeight(ParseIntError),
}
|
||||||
|
|
||||||
|
impl FromStr for PtySize {
|
||||||
|
type Err = PtySizeParseError;
|
||||||
|
|
||||||
|
/// Attempts to parse a str into PtySize using one of the following formats:
|
||||||
|
///
|
||||||
|
/// * rows,cols (defaults to 0 for pixel_width & pixel_height)
|
||||||
|
/// * rows,cols,pixel_width,pixel_height
|
||||||
|
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||||
|
let mut tokens = s.split(',');
|
||||||
|
|
||||||
|
Ok(Self {
|
||||||
|
rows: tokens
|
||||||
|
.next()
|
||||||
|
.ok_or(PtySizeParseError::MissingRows)?
|
||||||
|
.trim()
|
||||||
|
.parse()
|
||||||
|
.map_err(PtySizeParseError::InvalidRows)?,
|
||||||
|
cols: tokens
|
||||||
|
.next()
|
||||||
|
.ok_or(PtySizeParseError::MissingColumns)?
|
||||||
|
.trim()
|
||||||
|
.parse()
|
||||||
|
.map_err(PtySizeParseError::InvalidColumns)?,
|
||||||
|
pixel_width: tokens
|
||||||
|
.next()
|
||||||
|
.map(|s| s.trim().parse())
|
||||||
|
.transpose()
|
||||||
|
.map_err(PtySizeParseError::InvalidPixelWidth)?
|
||||||
|
.unwrap_or(0),
|
||||||
|
pixel_height: tokens
|
||||||
|
.next()
|
||||||
|
.map(|s| s.trim().parse())
|
||||||
|
.transpose()
|
||||||
|
.map_err(PtySizeParseError::InvalidPixelHeight)?
|
||||||
|
.unwrap_or(0),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;

    // Verifies that a fully-populated size serializes all four fields.
    #[test]
    fn should_be_able_to_serialize_to_json() {
        let size = PtySize {
            rows: 10,
            cols: 20,
            pixel_width: 30,
            pixel_height: 40,
        };

        let value = serde_json::to_value(size).unwrap();
        assert_eq!(
            value,
            serde_json::json!({
                "rows": 10,
                "cols": 20,
                "pixel_width": 30,
                "pixel_height": 40,
            })
        );
    }

    // Verifies that `#[serde(default)]` fills missing pixel fields with 0.
    #[test]
    fn should_be_able_to_deserialize_minimal_size_from_json() {
        let value = serde_json::json!({
            "rows": 10,
            "cols": 20,
        });

        let size: PtySize = serde_json::from_value(value).unwrap();
        assert_eq!(
            size,
            PtySize {
                rows: 10,
                cols: 20,
                pixel_width: 0,
                pixel_height: 0,
            }
        );
    }

    #[test]
    fn should_be_able_to_deserialize_full_size_from_json() {
        let value = serde_json::json!({
            "rows": 10,
            "cols": 20,
            "pixel_width": 30,
            "pixel_height": 40,
        });

        let size: PtySize = serde_json::from_value(value).unwrap();
        assert_eq!(
            size,
            PtySize {
                rows: 10,
                cols: 20,
                pixel_width: 30,
                pixel_height: 40,
            }
        );
    }

    #[test]
    fn should_be_able_to_serialize_to_msgpack() {
        let size = PtySize {
            rows: 10,
            cols: 20,
            pixel_width: 30,
            pixel_height: 40,
        };

        // NOTE: We don't actually check the output here because it's an implementation detail
        // and could change as we change how serialization is done. This is merely to verify
        // that we can serialize since there are times when serde fails to serialize at
        // runtime.
        let _ = rmp_serde::encode::to_vec_named(&size).unwrap();
    }

    #[test]
    fn should_be_able_to_deserialize_minimal_size_from_msgpack() {
        // NOTE: It may seem odd that we are serializing just to deserialize, but this is to
        // verify that we are not corrupting or causing issues when serializing on a
        // client/server and then trying to deserialize on the other side. This has happened
        // enough times with minor changes that we need tests to verify.
        // A local struct with only the mandatory fields stands in for an older client
        // that does not send pixel dimensions.
        #[derive(Serialize)]
        struct PartialSize {
            rows: u16,
            cols: u16,
        }
        let buf = rmp_serde::encode::to_vec_named(&PartialSize { rows: 10, cols: 20 }).unwrap();

        let size: PtySize = rmp_serde::decode::from_slice(&buf).unwrap();
        assert_eq!(
            size,
            PtySize {
                rows: 10,
                cols: 20,
                pixel_width: 0,
                pixel_height: 0,
            }
        );
    }

    #[test]
    fn should_be_able_to_deserialize_full_size_from_msgpack() {
        // NOTE: It may seem odd that we are serializing just to deserialize, but this is to
        // verify that we are not corrupting or causing issues when serializing on a
        // client/server and then trying to deserialize on the other side. This has happened
        // enough times with minor changes that we need tests to verify.
        let buf = rmp_serde::encode::to_vec_named(&PtySize {
            rows: 10,
            cols: 20,
            pixel_width: 30,
            pixel_height: 40,
        })
        .unwrap();

        let size: PtySize = rmp_serde::decode::from_slice(&buf).unwrap();
        assert_eq!(
            size,
            PtySize {
                rows: 10,
                cols: 20,
                pixel_width: 30,
                pixel_height: 40,
            }
        );
    }
}
|
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,142 @@
|
|||||||
|
use std::path::PathBuf;
|
||||||
|
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
/// Represents information about a system
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct SystemInfo {
    /// Family of the operating system as described in
    /// https://doc.rust-lang.org/std/env/consts/constant.FAMILY.html
    pub family: String,

    /// Name of the specific operating system as described in
    /// https://doc.rust-lang.org/std/env/consts/constant.OS.html
    pub os: String,

    /// Architecture of the CPU as described in
    /// https://doc.rust-lang.org/std/env/consts/constant.ARCH.html
    pub arch: String,

    /// Current working directory of the running server process
    pub current_dir: PathBuf,

    /// Primary separator for path components for the current platform
    /// as defined in https://doc.rust-lang.org/std/path/constant.MAIN_SEPARATOR.html
    pub main_separator: char,

    /// Name of the user running the server process
    pub username: String,

    /// Default shell tied to user running the server process
    pub shell: String,
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;

    // Round-trip tests pinning the serde wire format of SystemInfo.
    #[test]
    fn should_be_able_to_serialize_to_json() {
        let info = SystemInfo {
            family: String::from("family"),
            os: String::from("os"),
            arch: String::from("arch"),
            current_dir: PathBuf::from("current-dir"),
            main_separator: '/',
            username: String::from("username"),
            shell: String::from("shell"),
        };

        let value = serde_json::to_value(info).unwrap();
        assert_eq!(
            value,
            serde_json::json!({
                "family": "family",
                "os": "os",
                "arch": "arch",
                "current_dir": "current-dir",
                "main_separator": '/',
                "username": "username",
                "shell": "shell",
            })
        );
    }

    #[test]
    fn should_be_able_to_deserialize_from_json() {
        let value = serde_json::json!({
            "family": "family",
            "os": "os",
            "arch": "arch",
            "current_dir": "current-dir",
            "main_separator": '/',
            "username": "username",
            "shell": "shell",
        });

        let info: SystemInfo = serde_json::from_value(value).unwrap();
        assert_eq!(
            info,
            SystemInfo {
                family: String::from("family"),
                os: String::from("os"),
                arch: String::from("arch"),
                current_dir: PathBuf::from("current-dir"),
                main_separator: '/',
                username: String::from("username"),
                shell: String::from("shell"),
            }
        );
    }

    #[test]
    fn should_be_able_to_serialize_to_msgpack() {
        let info = SystemInfo {
            family: String::from("family"),
            os: String::from("os"),
            arch: String::from("arch"),
            current_dir: PathBuf::from("current-dir"),
            main_separator: '/',
            username: String::from("username"),
            shell: String::from("shell"),
        };

        // NOTE: We don't actually check the output here because it's an implementation detail
        // and could change as we change how serialization is done. This is merely to verify
        // that we can serialize since there are times when serde fails to serialize at
        // runtime.
        let _ = rmp_serde::encode::to_vec_named(&info).unwrap();
    }

    #[test]
    fn should_be_able_to_deserialize_from_msgpack() {
        // NOTE: It may seem odd that we are serializing just to deserialize, but this is to
        // verify that we are not corrupting or causing issues when serializing on a
        // client/server and then trying to deserialize on the other side. This has happened
        // enough times with minor changes that we need tests to verify.
        let buf = rmp_serde::encode::to_vec_named(&SystemInfo {
            family: String::from("family"),
            os: String::from("os"),
            arch: String::from("arch"),
            current_dir: PathBuf::from("current-dir"),
            main_separator: '/',
            username: String::from("username"),
            shell: String::from("shell"),
        })
        .unwrap();

        let info: SystemInfo = rmp_serde::decode::from_slice(&buf).unwrap();
        assert_eq!(
            info,
            SystemInfo {
                family: String::from("family"),
                os: String::from("os"),
                arch: String::from("arch"),
                current_dir: PathBuf::from("current-dir"),
                main_separator: '/',
                username: String::from("username"),
                shell: String::from("shell"),
            }
        );
    }
}
|
@ -0,0 +1,130 @@
|
|||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
use crate::common::{Capabilities, SemVer};
|
||||||
|
|
||||||
|
/// Represents version information.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Version {
    /// General version of server (arbitrary format)
    pub server_version: String,

    /// Protocol version as a (major, minor, patch) tuple
    pub protocol_version: SemVer,

    /// Capabilities of the server
    pub capabilities: Capabilities,
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;
    use crate::common::Capability;

    // Round-trip tests pinning the serde wire format of Version.
    #[test]
    fn should_be_able_to_serialize_to_json() {
        let version = Version {
            server_version: String::from("some version"),
            protocol_version: (1, 2, 3),
            capabilities: [Capability {
                kind: String::from("some kind"),
                description: String::from("some description"),
            }]
            .into_iter()
            .collect(),
        };

        let value = serde_json::to_value(version).unwrap();
        assert_eq!(
            value,
            serde_json::json!({
                "server_version": "some version",
                "protocol_version": [1, 2, 3],
                "capabilities": [{
                    "kind": "some kind",
                    "description": "some description",
                }]
            })
        );
    }

    #[test]
    fn should_be_able_to_deserialize_from_json() {
        let value = serde_json::json!({
            "server_version": "some version",
            "protocol_version": [1, 2, 3],
            "capabilities": [{
                "kind": "some kind",
                "description": "some description",
            }]
        });

        let version: Version = serde_json::from_value(value).unwrap();
        assert_eq!(
            version,
            Version {
                server_version: String::from("some version"),
                protocol_version: (1, 2, 3),
                capabilities: [Capability {
                    kind: String::from("some kind"),
                    description: String::from("some description"),
                }]
                .into_iter()
                .collect(),
            }
        );
    }

    #[test]
    fn should_be_able_to_serialize_to_msgpack() {
        let version = Version {
            server_version: String::from("some version"),
            protocol_version: (1, 2, 3),
            capabilities: [Capability {
                kind: String::from("some kind"),
                description: String::from("some description"),
            }]
            .into_iter()
            .collect(),
        };

        // NOTE: We don't actually check the output here because it's an implementation detail
        // and could change as we change how serialization is done. This is merely to verify
        // that we can serialize since there are times when serde fails to serialize at
        // runtime.
        let _ = rmp_serde::encode::to_vec_named(&version).unwrap();
    }

    #[test]
    fn should_be_able_to_deserialize_from_msgpack() {
        // NOTE: It may seem odd that we are serializing just to deserialize, but this is to
        // verify that we are not corrupting or causing issues when serializing on a
        // client/server and then trying to deserialize on the other side. This has happened
        // enough times with minor changes that we need tests to verify.
        let buf = rmp_serde::encode::to_vec_named(&Version {
            server_version: String::from("some version"),
            protocol_version: (1, 2, 3),
            capabilities: [Capability {
                kind: String::from("some kind"),
                description: String::from("some description"),
            }]
            .into_iter()
            .collect(),
        })
        .unwrap();

        let version: Version = rmp_serde::decode::from_slice(&buf).unwrap();
        assert_eq!(
            version,
            Version {
                server_version: String::from("some version"),
                protocol_version: (1, 2, 3),
                capabilities: [Capability {
                    kind: String::from("some kind"),
                    description: String::from("some description"),
                }]
                .into_iter()
                .collect(),
            }
        );
    }
}
|
@ -0,0 +1,17 @@
|
|||||||
|
mod common;
mod msg;
mod request;
mod response;
mod utils;

pub use common::*;
pub use msg::*;
pub use request::*;
pub use response::*;

/// Protocol version indicated by the tuple of (major, minor, patch).
///
/// This is different from the crate version, which matches that of the complete suite of distant
/// crates. Rather, this version is used to provide stability indicators when the protocol itself
/// changes across crate versions.
pub const PROTOCOL_VERSION: SemVer = (0, 1, 0);
|
@ -0,0 +1,192 @@
|
|||||||
|
use derive_more::From;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
/// Represents a wrapper around a message, supporting single and batch payloads.
|
||||||
|
#[derive(Clone, Debug, From, PartialEq, Eq, Serialize, Deserialize)]
|
||||||
|
#[serde(untagged)]
|
||||||
|
pub enum Msg<T> {
|
||||||
|
Single(T),
|
||||||
|
Batch(Vec<T>),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T> Msg<T> {
|
||||||
|
/// Creates a new msg with a singular payload.
|
||||||
|
#[inline]
|
||||||
|
pub fn single(payload: T) -> Self {
|
||||||
|
Self::Single(payload)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Creates a new msg with a batch payload.
|
||||||
|
pub fn batch<I>(payloads: I) -> Self
|
||||||
|
where
|
||||||
|
I: IntoIterator<Item = T>,
|
||||||
|
{
|
||||||
|
Self::Batch(payloads.into_iter().collect())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns true if msg has a single payload.
|
||||||
|
#[inline]
|
||||||
|
pub fn is_single(&self) -> bool {
|
||||||
|
matches!(self, Self::Single(_))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns reference to single value if msg is single variant.
|
||||||
|
#[inline]
|
||||||
|
pub fn as_single(&self) -> Option<&T> {
|
||||||
|
match self {
|
||||||
|
Self::Single(x) => Some(x),
|
||||||
|
_ => None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns mutable reference to single value if msg is single variant.
|
||||||
|
#[inline]
|
||||||
|
pub fn as_mut_single(&mut self) -> Option<&T> {
|
||||||
|
match self {
|
||||||
|
Self::Single(x) => Some(x),
|
||||||
|
_ => None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the single value if msg is single variant.
|
||||||
|
#[inline]
|
||||||
|
pub fn into_single(self) -> Option<T> {
|
||||||
|
match self {
|
||||||
|
Self::Single(x) => Some(x),
|
||||||
|
_ => None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns true if msg has a batch of payloads.
|
||||||
|
#[inline]
|
||||||
|
pub fn is_batch(&self) -> bool {
|
||||||
|
matches!(self, Self::Batch(_))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns reference to batch value if msg is batch variant.
|
||||||
|
#[inline]
|
||||||
|
pub fn as_batch(&self) -> Option<&[T]> {
|
||||||
|
match self {
|
||||||
|
Self::Batch(x) => Some(x),
|
||||||
|
_ => None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns mutable reference to batch value if msg is batch variant.
|
||||||
|
#[inline]
|
||||||
|
pub fn as_mut_batch(&mut self) -> Option<&mut [T]> {
|
||||||
|
match self {
|
||||||
|
Self::Batch(x) => Some(x),
|
||||||
|
_ => None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the batch value if msg is batch variant.
|
||||||
|
#[inline]
|
||||||
|
pub fn into_batch(self) -> Option<Vec<T>> {
|
||||||
|
match self {
|
||||||
|
Self::Batch(x) => Some(x),
|
||||||
|
_ => None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Convert into a collection of payload data.
|
||||||
|
#[inline]
|
||||||
|
pub fn into_vec(self) -> Vec<T> {
|
||||||
|
match self {
|
||||||
|
Self::Single(x) => vec![x],
|
||||||
|
Self::Batch(x) => x,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;

    // Tests for the untagged Single variant: serializes as the bare payload.
    mod single {
        use super::*;

        #[test]
        fn should_be_able_to_serialize_to_json() {
            let msg = Msg::single("hello world");

            let value = serde_json::to_value(msg).unwrap();
            assert_eq!(value, serde_json::json!("hello world"));
        }

        #[test]
        fn should_be_able_to_deserialize_from_json() {
            let value = serde_json::json!("hello world");

            let msg: Msg<String> = serde_json::from_value(value).unwrap();
            assert_eq!(msg, Msg::single(String::from("hello world")));
        }

        #[test]
        fn should_be_able_to_serialize_to_msgpack() {
            let msg = Msg::single("hello world");

            // NOTE: We don't actually check the output here because it's an implementation detail
            // and could change as we change how serialization is done. This is merely to verify
            // that we can serialize since there are times when serde fails to serialize at
            // runtime.
            let _ = rmp_serde::encode::to_vec_named(&msg).unwrap();
        }

        #[test]
        fn should_be_able_to_deserialize_from_msgpack() {
            // NOTE: It may seem odd that we are serializing just to deserialize, but this is to
            // verify that we are not corrupting or causing issues when serializing on a
            // client/server and then trying to deserialize on the other side. This has happened
            // enough times with minor changes that we need tests to verify.
            let buf = rmp_serde::encode::to_vec_named(&Msg::single("hello world")).unwrap();

            let msg: Msg<String> = rmp_serde::decode::from_slice(&buf).unwrap();
            assert_eq!(msg, Msg::single(String::from("hello world")));
        }
    }

    // Tests for the untagged Batch variant: serializes as a payload array.
    mod batch {
        use super::*;

        #[test]
        fn should_be_able_to_serialize_to_json() {
            let msg = Msg::batch(["hello world"]);

            let value = serde_json::to_value(msg).unwrap();
            assert_eq!(value, serde_json::json!(["hello world"]));
        }

        #[test]
        fn should_be_able_to_deserialize_from_json() {
            let value = serde_json::json!(["hello world"]);

            let msg: Msg<String> = serde_json::from_value(value).unwrap();
            assert_eq!(msg, Msg::batch([String::from("hello world")]));
        }

        #[test]
        fn should_be_able_to_serialize_to_msgpack() {
            let msg = Msg::batch(["hello world"]);

            // NOTE: We don't actually check the output here because it's an implementation detail
            // and could change as we change how serialization is done. This is merely to verify
            // that we can serialize since there are times when serde fails to serialize at
            // runtime.
            let _ = rmp_serde::encode::to_vec_named(&msg).unwrap();
        }

        #[test]
        fn should_be_able_to_deserialize_from_msgpack() {
            // NOTE: It may seem odd that we are serializing just to deserialize, but this is to
            // verify that we are not corrupting or causing issues when serializing on a
            // client/server and then trying to deserialize on the other side. This has happened
            // enough times with minor changes that we need tests to verify.
            let buf = rmp_serde::encode::to_vec_named(&Msg::batch(["hello world"])).unwrap();

            let msg: Msg<String> = rmp_serde::decode::from_slice(&buf).unwrap();
            assert_eq!(msg, Msg::batch([String::from("hello world")]));
        }
    }
}
|
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@ -1,68 +0,0 @@
|
|||||||
use indoc::indoc;
|
|
||||||
use rstest::*;
|
|
||||||
|
|
||||||
use crate::cli::fixtures::*;
|
|
||||||
|
|
||||||
const EXPECTED_TABLE: &str = indoc! {"
|
|
||||||
+------------------+------------------------------------------------------------------+
|
|
||||||
| kind | description |
|
|
||||||
+------------------+------------------------------------------------------------------+
|
|
||||||
| cancel_search | Supports canceling an active search against the filesystem |
|
|
||||||
+------------------+------------------------------------------------------------------+
|
|
||||||
| capabilities | Supports retrieving capabilities |
|
|
||||||
+------------------+------------------------------------------------------------------+
|
|
||||||
| copy | Supports copying files, directories, and symlinks |
|
|
||||||
+------------------+------------------------------------------------------------------+
|
|
||||||
| dir_create | Supports creating directory |
|
|
||||||
+------------------+------------------------------------------------------------------+
|
|
||||||
| dir_read | Supports reading directory |
|
|
||||||
+------------------+------------------------------------------------------------------+
|
|
||||||
| exists | Supports checking if a path exists |
|
|
||||||
+------------------+------------------------------------------------------------------+
|
|
||||||
| file_append | Supports appending to binary file |
|
|
||||||
+------------------+------------------------------------------------------------------+
|
|
||||||
| file_append_text | Supports appending to text file |
|
|
||||||
+------------------+------------------------------------------------------------------+
|
|
||||||
| file_read | Supports reading binary file |
|
|
||||||
+------------------+------------------------------------------------------------------+
|
|
||||||
| file_read_text | Supports reading text file |
|
|
||||||
+------------------+------------------------------------------------------------------+
|
|
||||||
| file_write | Supports writing binary file |
|
|
||||||
+------------------+------------------------------------------------------------------+
|
|
||||||
| file_write_text | Supports writing text file |
|
|
||||||
+------------------+------------------------------------------------------------------+
|
|
||||||
| metadata | Supports retrieving metadata about a file, directory, or symlink |
|
|
||||||
+------------------+------------------------------------------------------------------+
|
|
||||||
| proc_kill | Supports killing a spawned process |
|
|
||||||
+------------------+------------------------------------------------------------------+
|
|
||||||
| proc_resize_pty | Supports resizing the pty of a spawned process |
|
|
||||||
+------------------+------------------------------------------------------------------+
|
|
||||||
| proc_spawn | Supports spawning a process |
|
|
||||||
+------------------+------------------------------------------------------------------+
|
|
||||||
| proc_stdin | Supports sending stdin to a spawned process |
|
|
||||||
+------------------+------------------------------------------------------------------+
|
|
||||||
| remove | Supports removing files, directories, and symlinks |
|
|
||||||
+------------------+------------------------------------------------------------------+
|
|
||||||
| rename | Supports renaming files, directories, and symlinks |
|
|
||||||
+------------------+------------------------------------------------------------------+
|
|
||||||
| search | Supports searching filesystem using queries |
|
|
||||||
+------------------+------------------------------------------------------------------+
|
|
||||||
| set_permissions | Supports setting permissions on a file, directory, or symlink |
|
|
||||||
+------------------+------------------------------------------------------------------+
|
|
||||||
| system_info | Supports retrieving system information |
|
|
||||||
+------------------+------------------------------------------------------------------+
|
|
||||||
| unwatch | Supports unwatching filesystem for changes |
|
|
||||||
+------------------+------------------------------------------------------------------+
|
|
||||||
| watch | Supports watching filesystem for changes |
|
|
||||||
+------------------+------------------------------------------------------------------+
|
|
||||||
"};
|
|
||||||
|
|
||||||
#[rstest]
|
|
||||||
#[test_log::test]
|
|
||||||
fn should_output_capabilities(ctx: DistantManagerCtx) {
|
|
||||||
ctx.cmd("capabilities")
|
|
||||||
.assert()
|
|
||||||
.success()
|
|
||||||
.stdout(EXPECTED_TABLE)
|
|
||||||
.stderr("");
|
|
||||||
}
|
|
@ -0,0 +1,34 @@
|
|||||||
|
use distant_core::protocol::PROTOCOL_VERSION;
|
||||||
|
use rstest::*;
|
||||||
|
|
||||||
|
use crate::cli::fixtures::*;
|
||||||
|
use crate::cli::utils::TrimmedLinesMatchPredicate;
|
||||||
|
|
||||||
|
#[rstest]
|
||||||
|
#[test_log::test]
|
||||||
|
fn should_output_capabilities(ctx: DistantManagerCtx) {
|
||||||
|
// Because all of our crates have the same version, we can expect it to match
|
||||||
|
let package_name = "distant-core";
|
||||||
|
let package_version = env!("CARGO_PKG_VERSION");
|
||||||
|
let (major, minor, patch) = PROTOCOL_VERSION;
|
||||||
|
|
||||||
|
// Since our client and server are built the same, all capabilities should be listed with +
|
||||||
|
// and using 4 columns since we are not using a tty
|
||||||
|
let expected = indoc::formatdoc! {"
|
||||||
|
Client: distant {package_version} (Protocol {major}.{minor}.{patch})
|
||||||
|
Server: {package_name} {package_version} (Protocol {major}.{minor}.{patch})
|
||||||
|
Capabilities supported (+) or not (-):
|
||||||
|
+cancel_search +copy +dir_create +dir_read
|
||||||
|
+exists +file_append +file_append_text +file_read
|
||||||
|
+file_read_text +file_write +file_write_text +metadata
|
||||||
|
+proc_kill +proc_resize_pty +proc_spawn +proc_stdin
|
||||||
|
+remove +rename +search +set_permissions
|
||||||
|
+system_info +unwatch +version +watch
|
||||||
|
"};
|
||||||
|
|
||||||
|
ctx.cmd("version")
|
||||||
|
.assert()
|
||||||
|
.success()
|
||||||
|
.stdout(TrimmedLinesMatchPredicate::new(expected))
|
||||||
|
.stderr("");
|
||||||
|
}
|
@ -1,9 +1,12 @@
|
|||||||
use predicates::prelude::*;
|
use ::predicates::prelude::*;
|
||||||
|
|
||||||
|
mod predicates;
|
||||||
mod reader;
|
mod reader;
|
||||||
|
|
||||||
|
pub use self::predicates::TrimmedLinesMatchPredicate;
|
||||||
pub use reader::ThreadedReader;
|
pub use reader::ThreadedReader;
|
||||||
|
|
||||||
/// Produces a regex predicate using the given string
|
/// Produces a regex predicate using the given string
|
||||||
pub fn regex_pred(s: &str) -> predicates::str::RegexPredicate {
|
pub fn regex_pred(s: &str) -> ::predicates::str::RegexPredicate {
|
||||||
predicate::str::is_match(s).unwrap()
|
predicate::str::is_match(s).unwrap()
|
||||||
}
|
}
|
||||||
|
@ -0,0 +1,50 @@
|
|||||||
|
use predicates::reflection::PredicateReflection;
|
||||||
|
use predicates::Predicate;
|
||||||
|
use std::fmt;
|
||||||
|
|
||||||
|
/// Checks if lines of text match the provided, trimming each line
|
||||||
|
/// of both before comparing.
|
||||||
|
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||||
|
pub struct TrimmedLinesMatchPredicate {
|
||||||
|
pattern: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl TrimmedLinesMatchPredicate {
|
||||||
|
pub fn new(pattern: impl Into<String>) -> Self {
|
||||||
|
Self {
|
||||||
|
pattern: pattern.into(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl fmt::Display for TrimmedLinesMatchPredicate {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
write!(f, "trimmed_lines expects {}", self.pattern)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Predicate<str> for TrimmedLinesMatchPredicate {
|
||||||
|
fn eval(&self, variable: &str) -> bool {
|
||||||
|
let mut expected = self.pattern.lines();
|
||||||
|
let mut actual = variable.lines();
|
||||||
|
|
||||||
|
// Fail if we don't have the same number of lines
|
||||||
|
// or of the trimmed result of lines don't match
|
||||||
|
//
|
||||||
|
// Otherwise if we finish processing all lines,
|
||||||
|
// we are a success
|
||||||
|
loop {
|
||||||
|
match (expected.next(), actual.next()) {
|
||||||
|
(Some(expected), Some(actual)) => {
|
||||||
|
if expected.trim() != actual.trim() {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
(None, None) => return true,
|
||||||
|
_ => return false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PredicateReflection for TrimmedLinesMatchPredicate {}
|
Loading…
Reference in New Issue