Implement log file limit: add a maximum limit on the number of log files allowed; delete the oldest files when the limit is exceeded

This commit is contained in:
sean1832 2023-02-13 22:36:18 +11:00
parent 73cc9caaf6
commit c9ada48782
2 changed files with 30 additions and 23 deletions

View File

@ -1,6 +1,5 @@
import streamlit as st import streamlit as st
from modules import utilities as util from modules import utilities as util
import initial_file_creator
import brain import brain
import check_update import check_update
import time import time
@ -14,6 +13,8 @@ st.set_page_config(
page_title='Seanium Brain' page_title='Seanium Brain'
) )
util.remove_oldest_file('.user/log', 10)
model_options = ['text-davinci-003', 'text-curie-001', 'text-babbage-001', 'text-ada-001'] model_options = ['text-davinci-003', 'text-curie-001', 'text-babbage-001', 'text-ada-001']
header = st.container() header = st.container()
body = st.container() body = st.container()
@ -66,7 +67,7 @@ with st.sidebar:
summary_model = st.selectbox('Summary Model', model_options) summary_model = st.selectbox('Summary Model', model_options)
temp = st.slider('Temperature', 0.0, 1.0, value=0.1) temp = st.slider('Temperature', 0.0, 1.0, value=0.1)
max_tokens = st.slider('Max Tokens', 850, 2500, value=1000) max_tokens = st.slider('Max Tokens', 850, 4500, value=1000)
top_p = st.slider('Top_P', 0.0, 1.0, value=1.0) top_p = st.slider('Top_P', 0.0, 1.0, value=1.0)
freq_panl = st.slider('Frequency penalty', 0.0, 1.0, value=0.0) freq_panl = st.slider('Frequency penalty', 0.0, 1.0, value=0.0)
pres_panl = st.slider('Presence penalty', 0.0, 1.0, value=0.0) pres_panl = st.slider('Presence penalty', 0.0, 1.0, value=0.0)

View File

@ -1,5 +1,6 @@
import json import json
import os import os
import glob
# def extract_string(text, delimiter): # def extract_string(text, delimiter):
@ -10,29 +11,34 @@ import os
def extract_string(text, delimiter, force=False, join=True, split_mode=False):
    """Extract the substrings of *text* that sit between pairs of *delimiter*.

    Splitting on the delimiter yields alternating outside/inside pieces;
    the pieces at odd indices are the "inside" ones.

    Args:
        text: The string to search.
        delimiter: Marker string surrounding the wanted substrings.
        force: When the delimiter is absent, return '' instead of *text*.
        join: Join the extracted pieces into one string (True) or return
            them as a list (False).
        split_mode: Return the raw ``text.split(delimiter)`` list instead
            of extracting.

    Returns:
        str or list[str], depending on *join* / *split_mode*.
    """
    if delimiter not in text:
        # Nothing to extract: force=True demands an empty result,
        # otherwise the input passes through unchanged.
        return '' if force else text
    if split_mode:
        return text.split(delimiter)
    # Every second piece (odd indices) lies between a pair of delimiters;
    # the [1::2] slice replaces the original manual index loop.
    pieces = text.split(delimiter)[1::2]
    return ''.join(pieces) if join else pieces
def remove_oldest_file(directory, max_files):
    """Enforce a cap on the number of files in *directory*.

    Deletes the oldest files until fewer than *max_files* remain, so a
    subsequently created file keeps the total at or under the limit.

    Args:
        directory: Directory to scan (non-recursive).
        max_files: Count at which deletion starts (comparison is >=).
    """
    # Only regular files: os.remove() would raise on a subdirectory.
    files = [f for f in glob.glob(f'{directory}/*') if os.path.isfile(f)]
    # The original removed a single file per call; loop so the limit is
    # enforced even when the directory is already far over it. The
    # `files and` guard prevents min([]) when max_files <= 0.
    while files and len(files) >= max_files:
        # NOTE(review): getctime is creation time on Windows but inode
        # change time on Unix — getmtime may be the safer "oldest" key;
        # kept for compatibility with the original behavior.
        oldest_file = min(files, key=os.path.getctime)
        os.remove(oldest_file)
        files.remove(oldest_file)
def create_path_not_exist(path):
    """Ensure the parent directory of *path* exists, creating it if needed.

    Args:
        path: File path whose directory component should exist.
    """
    directory = os.path.dirname(path)
    # A bare filename has no directory component; os.makedirs('') would
    # raise FileNotFoundError, so skip it.
    if directory:
        # exist_ok=True removes the check-then-create race of the
        # original exists()/makedirs() pair.
        os.makedirs(directory, exist_ok=True)
def create_file_not_exist(path): def create_file_not_exist(path):
@ -78,7 +84,7 @@ def read_files(file_dir, delimiter='', force=False, single_string=True):
def write_file(content, filepath, mode='w'):
    """Write *content* to *filepath*, creating parent directories first.

    Args:
        content: Text to write.
        filepath: Destination file path.
        mode: File-open mode — 'w' to overwrite (default), 'a' to append.
    """
    # Make sure the destination directory exists before opening the file.
    create_path_not_exist(filepath)
    destination = open(filepath, mode, encoding='utf-8')
    with destination:
        destination.write(content)