Add json support to gui

pull/1083/head
Rafał Mikrut 8 months ago
parent 9b57382e39
commit 1409d014b7

@@ -11,6 +11,7 @@ use futures::channel::mpsc::UnboundedSender;
use log::debug;
use mime_guess::get_mime_extensions;
use rayon::prelude::*;
use serde::Serialize;
use crate::common::{prepare_thread_handler_common, send_info_and_wait_for_ending_all_threads};
use crate::common_dir_traversal::{CheckingMethod, DirTraversalBuilder, DirTraversalResult, FileEntry, ProgressData, ToolType};
@@ -158,7 +159,7 @@ const WORKAROUNDS: &[(&str, &str)] = &[
("exe", "xls"), // Not sure why xls is not recognized
];
#[derive(Clone)]
#[derive(Clone, Serialize)]
pub struct BadFileEntry {
pub path: PathBuf,
pub modified_date: u64,
@@ -426,6 +427,10 @@ impl PrintResults for BadExtensions {
Ok(())
}
fn save_results_to_file_as_json(&self, file_name: &str, pretty_print: bool) -> std::io::Result<()> {
self.save_results_to_file_as_json_internal(file_name, &self.bad_extensions_files, pretty_print)
}
}
impl BadExtensions {

@@ -12,13 +12,14 @@ use futures::channel::mpsc::UnboundedSender;
use humansize::{format_size, BINARY};
use log::debug;
use rayon::prelude::*;
use serde::{Deserialize, Serialize};
use crate::common::{check_folder_children, prepare_thread_handler_common, send_info_and_wait_for_ending_all_threads, split_path};
use crate::common_dir_traversal::{common_get_entry_data_metadata, common_read_dir, get_lowercase_name, get_modified_time, CheckingMethod, ProgressData, ToolType};
use crate::common_tool::{CommonData, CommonToolData, DeleteMethod};
use crate::common_traits::{DebugPrint, PrintResults};
#[derive(Clone, Debug)]
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct FileEntry {
pub path: PathBuf,
pub size: u64,
@@ -39,7 +40,7 @@ pub struct Info {
pub struct BigFile {
common_data: CommonToolData,
information: Info,
big_files: Vec<(u64, FileEntry)>,
big_files: Vec<FileEntry>,
number_of_files_to_check: usize,
search_mode: SearchMode,
}
@@ -189,7 +190,7 @@ impl BigFile {
iter = Box::new(old_map.into_iter().rev());
}
for (size, mut vector) in iter {
for (_size, mut vector) in iter {
if self.information.number_of_real_files < self.number_of_files_to_check {
if vector.len() > 1 {
vector.sort_unstable_by_key(|e| {
@@ -199,7 +200,7 @@ }
}
for file in vector {
if self.information.number_of_real_files < self.number_of_files_to_check {
self.big_files.push((size, file));
self.big_files.push(file);
self.information.number_of_real_files += 1;
} else {
break;
@@ -214,7 +215,7 @@
fn delete_files(&mut self) {
match self.common_data.delete_method {
DeleteMethod::Delete => {
for (_, file_entry) in &self.big_files {
for file_entry in &self.big_files {
if fs::remove_file(&file_entry.path).is_err() {
self.common_data.text_messages.warnings.push(file_entry.path.display().to_string());
}
@@ -262,8 +263,8 @@ impl PrintResults for BigFile {
} else {
writeln!(writer, "{} the smallest files.\n\n", self.information.number_of_real_files)?;
}
for (size, file_entry) in &self.big_files {
writeln!(writer, "{} ({}) - {}", format_size(*size, BINARY), size, file_entry.path.display())?;
for file_entry in &self.big_files {
writeln!(writer, "{} ({}) - {}", format_size(file_entry.size, BINARY), file_entry.size, file_entry.path.display())?;
}
} else {
write!(writer, "Not found any files.").unwrap();
@@ -271,6 +272,10 @@
Ok(())
}
fn save_results_to_file_as_json(&self, file_name: &str, pretty_print: bool) -> std::io::Result<()> {
self.save_results_to_file_as_json_internal(file_name, &self.big_files, pretty_print)
}
}
impl CommonData for BigFile {
@@ -287,7 +292,7 @@ impl BigFile {
self.search_mode = search_mode;
}
pub const fn get_big_files(&self) -> &Vec<(u64, FileEntry)> {
pub const fn get_big_files(&self) -> &Vec<FileEntry> {
&self.big_files
}
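
With big_files now a Vec&lt;FileEntry&gt; rather than Vec&lt;(u64, FileEntry)&gt;, callers read the size from the entry itself instead of destructuring a tuple. A minimal standalone sketch of the new access pattern (the trimmed-down FileEntry and the sample data below are illustrative only, not the real struct):

use std::path::PathBuf;

// Illustrative stand-in for the real FileEntry; the actual struct carries more fields.
struct FileEntry {
    path: PathBuf,
    size: u64,
}

fn main() {
    let big_files: Vec<FileEntry> = vec![FileEntry {
        path: PathBuf::from("/tmp/example.iso"),
        size: 4_700_000_000,
    }];

    // Old shape: for (size, file_entry) in &big_files { ... }
    // New shape: the size travels with the entry.
    for file_entry in &big_files {
        println!("{} - {} bytes", file_entry.path.display(), file_entry.size);
    }
}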

@@ -481,6 +481,10 @@ impl PrintResults for BrokenFiles {
Ok(())
}
fn save_results_to_file_as_json(&self, file_name: &str, pretty_print: bool) -> std::io::Result<()> {
self.save_results_to_file_as_json_internal(file_name, &self.broken_files, pretty_print)
}
}
fn check_extension_availability(file_name_lowercase: &str) -> TypeOfFile {

@@ -1,4 +1,5 @@
use fun_time::fun_time;
use serde::Serialize;
use std::fs::File;
use std::io::{BufWriter, Write};
use std::path::Path;
@@ -31,6 +32,39 @@ pub trait PrintResults {
writer.flush()?;
Ok(())
}
fn save_results_to_file_as_json(&self, file_name: &str, pretty_print: bool) -> std::io::Result<()>;
fn save_results_to_file_as_json_internal<T: Serialize>(&self, file_name: &str, item_to_serialize: &T, pretty_print: bool) -> std::io::Result<()> {
if pretty_print {
self.save_results_to_file_as_json_pretty(file_name, item_to_serialize)
} else {
self.save_results_to_file_as_json_compact(file_name, item_to_serialize)
}
}
#[fun_time(message = "save_results_to_file_as_json_pretty")]
fn save_results_to_file_as_json_pretty<T: Serialize>(&self, file_name: &str, item_to_serialize: &T) -> std::io::Result<()> {
let file_handler = File::create(file_name)?;
let mut writer = BufWriter::new(file_handler);
serde_json::to_writer_pretty(&mut writer, item_to_serialize)?;
Ok(())
}
#[fun_time(message = "save_results_to_file_as_json_compact")]
fn save_results_to_file_as_json_compact<T: Serialize>(&self, file_name: &str, item_to_serialize: &T) -> std::io::Result<()> {
let file_handler = File::create(file_name)?;
let mut writer = BufWriter::new(file_handler);
serde_json::to_writer(&mut writer, item_to_serialize)?;
Ok(())
}
fn save_all_in_one(&self, file_name: &str) -> std::io::Result<()> {
self.save_results_to_file_as_json(&format!("{file_name}_pretty.json"), true)?;
self.save_results_to_file_as_json(&format!("{file_name}_compact.json"), false)?;
self.print_results_to_file(&format!("{file_name}.txt"))?;
Ok(())
}
}
pub trait ResultEntry {

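The core of the new trait methods is choosing between serde_json::to_writer and serde_json::to_writer_pretty over a buffered file writer, mirroring the _pretty and _compact helpers above. A standalone sketch of that pattern (ExampleEntry and the file names here are hypothetical, for illustration only):

use std::fs::File;
use std::io::BufWriter;
use std::path::PathBuf;

use serde::Serialize;

#[derive(Serialize)]
struct ExampleEntry {
    path: PathBuf,
    size: u64,
}

// Same shape as save_results_to_file_as_json_internal: one helper, two output styles.
fn save_as_json<T: Serialize>(file_name: &str, item: &T, pretty_print: bool) -> std::io::Result<()> {
    let file_handler = File::create(file_name)?;
    let mut writer = BufWriter::new(file_handler);
    if pretty_print {
        serde_json::to_writer_pretty(&mut writer, item)?;
    } else {
        serde_json::to_writer(&mut writer, item)?;
    }
    Ok(())
}

fn main() -> std::io::Result<()> {
    let results = vec![
        ExampleEntry { path: PathBuf::from("/tmp/a.txt"), size: 10 },
        ExampleEntry { path: PathBuf::from("/tmp/b.txt"), size: 20 },
    ];
    // save_all_in_one writes the same data both ways, plus a plain-text report:
    save_as_json("results_example_pretty.json", &results, true)?;
    save_as_json("results_example_compact.json", &results, false)?;
    Ok(())
}

This is the same pretty/compact pair (plus a text report) that the GUI save button produces through save_all_in_one in the connect_button_save changes further down.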
@@ -989,6 +989,24 @@ impl PrintResults for DuplicateFinder {
}
writeln!(writer)?;
}
} else if !self.files_with_identical_names_referenced.is_empty() {
writeln!(
writer,
"-------------------------------------------------Files with same names in referenced folders-------------------------------------------------"
)?;
writeln!(
writer,
"Found {} files in {} groups with the same name (may have different content)",
self.information.number_of_duplicated_files_by_name, self.information.number_of_groups_by_name,
)?;
for (name, (file_entry, vector)) in self.files_with_identical_names_referenced.iter().rev() {
writeln!(writer, "Name - {} - {} files ", name, vector.len())?;
writeln!(writer, "Reference file - {}", file_entry.path.display())?;
for j in vector {
writeln!(writer, "{}", j.path.display())?;
}
writeln!(writer)?;
}
} else {
write!(writer, "Not found any files with same names.")?;
}
@@ -1011,6 +1029,24 @@ impl PrintResults for DuplicateFinder {
}
writeln!(writer)?;
}
} else if !self.files_with_identical_size_names_referenced.is_empty() {
writeln!(
writer,
"-------------------------------------------------Files with same size and names in referenced folders-------------------------------------------------"
)?;
writeln!(
writer,
"Found {} files in {} groups with the same size and name (may have different content)",
self.information.number_of_duplicated_files_by_size_name, self.information.number_of_groups_by_size_name,
)?;
for ((size, name), (file_entry, vector)) in self.files_with_identical_size_names_referenced.iter().rev() {
writeln!(writer, "Name - {}, {} - {} files ", name, format_size(*size, BINARY), vector.len())?;
writeln!(writer, "Reference file - {}", file_entry.path.display())?;
for j in vector {
writeln!(writer, "{}", j.path.display())?;
}
writeln!(writer)?;
}
} else {
write!(writer, "Not found any files with same size and names.")?;
}
@@ -1034,6 +1070,25 @@ impl PrintResults for DuplicateFinder {
writeln!(writer, "{}", file_entry.path.display())?;
}
}
} else if !self.files_with_identical_size_referenced.is_empty() {
writeln!(
writer,
"-------------------------------------------------Files with same size in referenced folders-------------------------------------------------"
)?;
writeln!(
writer,
"Found {} duplicated files in {} groups, taking {}.",
self.information.number_of_duplicated_files_by_size,
self.information.number_of_groups_by_size,
format_size(self.information.lost_space_by_size, BINARY)
)?;
for (size, (file_entry, vector)) in self.files_with_identical_size_referenced.iter().rev() {
writeln!(writer, "\n---- Size {} ({}) - {} files", format_size(*size, BINARY), size, vector.len())?;
writeln!(writer, "Reference file - {}", file_entry.path.display())?;
for file_entry in vector {
writeln!(writer, "{}", file_entry.path.display())?;
}
}
} else {
write!(writer, "Not found any duplicates.")?;
}
@@ -1059,6 +1114,27 @@ impl PrintResults for DuplicateFinder {
}
}
}
} else if !self.files_with_identical_hashes_referenced.is_empty() {
writeln!(
writer,
"-------------------------------------------------Files with same hashes in referenced folders-------------------------------------------------"
)?;
writeln!(
writer,
"Found {} duplicated files in {} groups, taking {}.",
self.information.number_of_duplicated_files_by_hash,
self.information.number_of_groups_by_hash,
format_size(self.information.lost_space_by_hash, BINARY)
)?;
for (size, vectors_vector) in self.files_with_identical_hashes_referenced.iter().rev() {
for (file_entry, vector) in vectors_vector {
writeln!(writer, "\n---- Size {} ({}) - {} files", format_size(*size, BINARY), size, vector.len())?;
writeln!(writer, "Reference file - {}", file_entry.path.display())?;
for file_entry in vector {
writeln!(writer, "{}", file_entry.path.display())?;
}
}
}
} else {
write!(writer, "Not found any duplicates.")?;
}
@@ -1068,6 +1144,26 @@ impl PrintResults for DuplicateFinder {
Ok(())
}
fn save_results_to_file_as_json(&self, file_name: &str, pretty_print: bool) -> io::Result<()> {
if self.get_use_reference() {
match self.check_method {
CheckingMethod::Name => self.save_results_to_file_as_json_internal(file_name, &self.files_with_identical_names_referenced, pretty_print),
CheckingMethod::SizeName => self.save_results_to_file_as_json_internal(file_name, &self.files_with_identical_size_names_referenced, pretty_print),
CheckingMethod::Size => self.save_results_to_file_as_json_internal(file_name, &self.files_with_identical_size_referenced, pretty_print),
CheckingMethod::Hash => self.save_results_to_file_as_json_internal(file_name, &self.files_with_identical_hashes_referenced, pretty_print),
_ => panic!(),
}
} else {
match self.check_method {
CheckingMethod::Name => self.save_results_to_file_as_json_internal(file_name, &self.files_with_identical_names, pretty_print),
CheckingMethod::SizeName => self.save_results_to_file_as_json_internal(file_name, &self.files_with_identical_size_names, pretty_print),
CheckingMethod::Size => self.save_results_to_file_as_json_internal(file_name, &self.files_with_identical_size, pretty_print),
CheckingMethod::Hash => self.save_results_to_file_as_json_internal(file_name, &self.files_with_identical_hashes, pretty_print),
_ => panic!(),
}
}
}
}
fn delete_files(vector: &[FileEntry], delete_method: &DeleteMethod, text_messages: &mut Messages, dryrun: bool) -> (u64, usize, usize) {

@@ -141,6 +141,10 @@ impl PrintResults for EmptyFiles {
Ok(())
}
fn save_results_to_file_as_json(&self, file_name: &str, pretty_print: bool) -> std::io::Result<()> {
self.save_results_to_file_as_json_internal(file_name, &self.empty_files, pretty_print)
}
}
impl EmptyFiles {

@@ -158,6 +158,10 @@ impl PrintResults for EmptyFolder {
Ok(())
}
fn save_results_to_file_as_json(&self, file_name: &str, pretty_print: bool) -> std::io::Result<()> {
self.save_results_to_file_as_json_internal(file_name, &self.empty_folder_list.keys().collect::<Vec<_>>(), pretty_print)
}
}
impl CommonData for EmptyFolder {

@@ -127,6 +127,10 @@ impl PrintResults for InvalidSymlinks {
Ok(())
}
fn save_results_to_file_as_json(&self, file_name: &str, pretty_print: bool) -> std::io::Result<()> {
self.save_results_to_file_as_json_internal(file_name, &self.invalid_symlinks, pretty_print)
}
}
impl CommonData for InvalidSymlinks {

@@ -924,10 +924,40 @@ impl PrintResults for SameMusic {
file_entry.genre,
file_entry.bitrate,
file_entry.path.display()
)
.unwrap();
)?;
}
writeln!(writer).unwrap();
writeln!(writer)?;
}
} else if !self.duplicated_music_entries_referenced.is_empty() {
writeln!(writer, "{} music files which have similar friends\n\n", self.duplicated_music_entries_referenced.len())?;
for (file_entry, vec_file_entry) in &self.duplicated_music_entries_referenced {
writeln!(writer, "Found {} music files which have similar friends", vec_file_entry.len())?;
writeln!(writer)?;
writeln!(
writer,
"TT: {} - TA: {} - Y: {} - L: {} - G: {} - B: {} - P: {}",
file_entry.track_title,
file_entry.track_artist,
file_entry.year,
file_entry.length,
file_entry.genre,
file_entry.bitrate,
file_entry.path.display()
)?;
for file_entry in vec_file_entry {
writeln!(
writer,
"TT: {} - TA: {} - Y: {} - L: {} - G: {} - B: {} - P: {}",
file_entry.track_title,
file_entry.track_artist,
file_entry.year,
file_entry.length,
file_entry.genre,
file_entry.bitrate,
file_entry.path.display()
)?;
}
writeln!(writer)?;
}
} else {
write!(writer, "Not found any similar music files.")?;
@@ -935,6 +965,14 @@ impl PrintResults for SameMusic {
Ok(())
}
fn save_results_to_file_as_json(&self, file_name: &str, pretty_print: bool) -> std::io::Result<()> {
if self.get_use_reference() {
self.save_results_to_file_as_json_internal(file_name, &self.duplicated_music_entries_referenced, pretty_print)
} else {
self.save_results_to_file_as_json_internal(file_name, &self.duplicated_music_entries, pretty_print)
}
}
}
fn get_approximate_conversion(what: &mut String) {

@@ -850,12 +850,46 @@ impl PrintResults for SimilarImages {
}
writeln!(writer)?;
}
} else if !self.similar_referenced_vectors.is_empty() {
writeln!(writer, "{} images which have similar friends\n\n", self.similar_referenced_vectors.len())?;
for (file_entry, vec_file_entry) in &self.similar_referenced_vectors {
writeln!(writer, "Found {} images which have similar friends", vec_file_entry.len())?;
writeln!(writer)?;
writeln!(
writer,
"{} - {} - {} - {}",
file_entry.path.display(),
file_entry.dimensions,
format_size(file_entry.size, BINARY),
get_string_from_similarity(&file_entry.similarity, self.hash_size)
)?;
for file_entry in vec_file_entry {
writeln!(
writer,
"{} - {} - {} - {}",
file_entry.path.display(),
file_entry.dimensions,
format_size(file_entry.size, BINARY),
get_string_from_similarity(&file_entry.similarity, self.hash_size)
)?;
}
writeln!(writer)?;
}
} else {
write!(writer, "Not found any similar images.")?;
}
Ok(())
}
fn save_results_to_file_as_json(&self, file_name: &str, pretty_print: bool) -> std::io::Result<()> {
if self.get_use_reference() {
self.save_results_to_file_as_json_internal(file_name, &self.similar_referenced_vectors, pretty_print)
} else {
self.save_results_to_file_as_json_internal(file_name, &self.similar_vectors, pretty_print)
}
}
}
pub fn get_string_from_similarity(similarity: &u32, hash_size: u8) -> String {

@@ -435,12 +435,32 @@ impl PrintResults for SimilarVideos {
}
writeln!(writer)?;
}
} else if !self.similar_referenced_vectors.is_empty() {
write!(writer, "{} videos which have similar friends\n\n", self.similar_referenced_vectors.len())?;
for (fe, struct_similar) in &self.similar_referenced_vectors {
writeln!(writer, "Found {} videos which have similar friends", struct_similar.len())?;
writeln!(writer)?;
writeln!(writer, "{} - {}", fe.path.display(), format_size(fe.size, BINARY))?;
for file_entry in struct_similar {
writeln!(writer, "{} - {}", file_entry.path.display(), format_size(file_entry.size, BINARY))?;
}
writeln!(writer)?;
}
} else {
write!(writer, "Not found any similar videos.")?;
}
Ok(())
}
fn save_results_to_file_as_json(&self, file_name: &str, pretty_print: bool) -> std::io::Result<()> {
if self.get_use_reference() {
self.save_results_to_file_as_json_internal(file_name, &self.similar_referenced_vectors, pretty_print)
} else {
self.save_results_to_file_as_json_internal(file_name, &self.similar_vectors, pretty_print)
}
}
}
pub fn check_if_ffmpeg_is_installed() -> bool {

@@ -10,6 +10,7 @@ use crossbeam_channel::Receiver;
use fun_time::fun_time;
use futures::channel::mpsc::UnboundedSender;
use rayon::prelude::*;
use serde::Serialize;
use crate::common::{check_folder_children, prepare_thread_handler_common, send_info_and_wait_for_ending_all_threads};
use crate::common_dir_traversal::{common_get_entry_data_metadata, common_read_dir, get_lowercase_name, get_modified_time, CheckingMethod, ProgressData, ToolType};
@@ -32,7 +33,7 @@ const TEMP_EXTENSIONS: &[&str] = &[
".partial",
];
#[derive(Clone)]
#[derive(Clone, Serialize)]
pub struct FileEntry {
pub path: PathBuf,
pub modified_date: u64,
@@ -206,6 +207,10 @@ impl PrintResults for Temporary {
Ok(())
}
fn save_results_to_file_as_json(&self, file_name: &str, pretty_print: bool) -> std::io::Result<()> {
self.save_results_to_file_as_json_internal(file_name, &self.temporary_files, pretty_print)
}
}
impl Default for Temporary {

@@ -538,7 +538,7 @@ move_files_title_dialog = Choose folder to which you want to move duplicated fil
move_files_choose_more_than_1_path = Only one path may be selected to be able to copy their duplicated files, selected {$path_number}.
move_stats = Properly moved {$num_files}/{$all_files} items
save_results_to_file = Saved results to file {$name}
save_results_to_file = Saved results to both txt and json files.
search_not_choosing_any_music = ERROR: You must select at least one checkbox with music searching types.
search_not_choosing_any_broken_files = ERROR: You must select at least one checkbox with type of checked broken files.

@@ -957,11 +957,11 @@ fn computer_big_files(
let vector = bf.get_big_files();
for (size, file_entry) in vector {
for file_entry in vector {
let (directory, file) = split_path(&file_entry.path);
let values: [(u32, &dyn ToValue); COLUMNS_NUMBER] = [
(ColumnsBigFiles::SelectionButton as u32, &false),
(ColumnsBigFiles::Size as u32, &(format_size(*size, BINARY))),
(ColumnsBigFiles::Size as u32, &(format_size(file_entry.size, BINARY))),
(ColumnsBigFiles::Name as u32, &file),
(ColumnsBigFiles::Path as u32, &directory),
(
@@ -969,7 +969,7 @@
&(NaiveDateTime::from_timestamp_opt(file_entry.modified_date as i64, 0).unwrap().to_string()),
),
(ColumnsBigFiles::ModificationAsSecs as u32, &(file_entry.modified_date as i64)),
(ColumnsBigFiles::SizeAsBytes as u32, &(size)),
(ColumnsBigFiles::SizeAsBytes as u32, &(file_entry.size)),
];
list_store.set(&list_store.append(), &values);
}

@@ -2,14 +2,14 @@ use std::cell::RefCell;
use std::collections::HashMap;
use std::rc::Rc;
use czkawka_core::common_traits::PrintResults;
use gtk4::prelude::*;
use gtk4::{Button, Entry};
use czkawka_core::common_traits::PrintResults;
use crate::flg;
use crate::gui_structs::gui_data::GuiData;
use crate::help_functions::BottomButtonsEnum;
use crate::localizer_core::generate_translation_hashmap;
use crate::notebook_enums::*;
pub fn connect_button_save(gui_data: &GuiData) {
@@ -30,64 +30,18 @@ pub fn connect_button_save(gui_data: &GuiData) {
let entry_info = gui_data.entry_info.clone();
let notebook_main = gui_data.main_notebook.notebook_main.clone();
buttons_save.connect_clicked(move |_| {
let file_name;
let result = match to_notebook_main_enum(notebook_main.current_page().unwrap()) {
NotebookMainEnum::Duplicate => {
file_name = "results_duplicates.txt";
shared_duplication_state.borrow_mut().print_results_to_file(file_name)
}
NotebookMainEnum::EmptyDirectories => {
file_name = "results_empty_folder.txt";
shared_empty_folders_state.borrow_mut().print_results_to_file(file_name)
}
NotebookMainEnum::EmptyFiles => {
file_name = "results_empty_files.txt";
shared_empty_files_state.borrow_mut().print_results_to_file(file_name)
}
NotebookMainEnum::Temporary => {
file_name = "results_temporary_files.txt";
shared_temporary_files_state.borrow_mut().print_results_to_file(file_name)
}
NotebookMainEnum::BigFiles => {
file_name = "results_big_files.txt";
shared_big_files_state.borrow_mut().print_results_to_file(file_name)
}
NotebookMainEnum::SimilarImages => {
file_name = "results_similar_images.txt";
shared_similar_images_state.borrow_mut().print_results_to_file(file_name)
}
NotebookMainEnum::SimilarVideos => {
file_name = "results_similar_videos.txt";
shared_similar_videos_state.borrow_mut().print_results_to_file(file_name)
}
NotebookMainEnum::SameMusic => {
file_name = "results_same_music.txt";
shared_same_music_state.borrow_mut().print_results_to_file(file_name)
}
NotebookMainEnum::Symlinks => {
file_name = "results_invalid_symlinks.txt";
shared_same_invalid_symlinks.borrow_mut().print_results_to_file(file_name)
}
NotebookMainEnum::BrokenFiles => {
file_name = "results_broken_files.txt";
shared_broken_files_state.borrow_mut().print_results_to_file(file_name)
}
NotebookMainEnum::BadExtensions => {
file_name = "results_bad_extensions.txt";
shared_bad_extensions_state.borrow_mut().print_results_to_file(file_name)
}
NotebookMainEnum::Duplicate => shared_duplication_state.borrow().save_all_in_one("results_duplicates"),
NotebookMainEnum::EmptyDirectories => shared_empty_folders_state.borrow().save_all_in_one("results_empty_directories"),
NotebookMainEnum::EmptyFiles => shared_empty_files_state.borrow().save_all_in_one("results_empty_files"),
NotebookMainEnum::Temporary => shared_temporary_files_state.borrow().save_all_in_one("results_temporary_files"),
NotebookMainEnum::BigFiles => shared_big_files_state.borrow().save_all_in_one("results_big_files"),
NotebookMainEnum::SimilarImages => shared_similar_images_state.borrow().save_all_in_one("results_similar_images"),
NotebookMainEnum::SimilarVideos => shared_similar_videos_state.borrow().save_all_in_one("results_similar_videos"),
NotebookMainEnum::SameMusic => shared_same_music_state.borrow().save_all_in_one("results_same_music"),
NotebookMainEnum::Symlinks => shared_same_invalid_symlinks.borrow().save_all_in_one("results_invalid_symlinks"),
NotebookMainEnum::BrokenFiles => shared_broken_files_state.borrow().save_all_in_one("results_broken_files"),
NotebookMainEnum::BadExtensions => shared_bad_extensions_state.borrow().save_all_in_one("results_bad_extensions"),
};
match result {
@@ -99,7 +53,6 @@ pub fn connect_button_save(gui_data: &GuiData) {
}
post_save_things(
file_name,
&to_notebook_main_enum(notebook_main.current_page().unwrap()),
&shared_buttons,
&entry_info,
@@ -109,13 +62,12 @@
}
fn post_save_things(
file_name: &str,
type_of_tab: &NotebookMainEnum,
shared_buttons: &Rc<RefCell<HashMap<NotebookMainEnum, HashMap<BottomButtonsEnum, bool>>>>,
entry_info: &Entry,
buttons_save: &Button,
) {
entry_info.set_text(flg!("save_results_to_file", generate_translation_hashmap(vec![("name", file_name.to_string())])).as_str());
entry_info.set_text(&flg!("save_results_to_file"));
// Set state
{
buttons_save.hide();
