From 546ed65e1d60c050df72935c51bcf401184f6283 Mon Sep 17 00:00:00 2001 From: Ozzieisaacs Date: Mon, 10 Jun 2019 12:18:11 +0200 Subject: [PATCH] Update logging Fix sort order author Fixes sorting view Moved version info added feature limit listening to single ipaddress --- .gitattributes | 2 +- cps/__init__.py | 6 +-- cps/admin.py | 24 ++++++++-- cps/cache_buster.py | 6 +-- cps/cli.py | 61 ++++++++++++++++++++++-- cps/comic.py | 4 +- cps/constants.py | 7 +++ cps/editbooks.py | 14 +++--- cps/gdrive.py | 22 ++++----- cps/gdriveutils.py | 16 +++---- cps/helper.py | 48 +++++++++---------- cps/jinjia.py | 4 +- cps/logger.py | 64 +++++++++++++++++++------ cps/oauth_bb.py | 12 ++--- cps/opds.py | 4 +- cps/server.py | 58 ++++++++++++++--------- cps/shelf.py | 28 +++++------ cps/templates/author.html | 18 +++---- cps/templates/config_edit.html | 10 +++- cps/templates/index.html | 18 +++---- cps/ub.py | 37 +++++++++++++-- cps/updater.py | 87 +++++++++++++++------------------- cps/uploader.py | 18 +++---- cps/web.py | 51 ++++++++++---------- cps/worker.py | 24 +++++----- 25 files changed, 401 insertions(+), 242 deletions(-) diff --git a/.gitattributes b/.gitattributes index f4bb7a9f..92739fe9 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,4 +1,4 @@ -updater.py ident export-subst +constants.py ident export-subst /test export-ignore cps/static/css/libs/* linguist-vendored cps/static/js/libs/* linguist-vendored diff --git a/cps/__init__.py b/cps/__init__.py index 3b35570b..dc082507 100755 --- a/cps/__init__.py +++ b/cps/__init__.py @@ -76,7 +76,6 @@ try: with open(os.path.join(_TRANSLATIONS_DIR, 'iso639.pickle'), 'rb') as f: language_table = cPickle.load(f) except cPickle.UnpicklingError as error: - # app.logger.error("Can't read file cps/translations/iso639.pickle: %s", error) print("Can't read file cps/translations/iso639.pickle: %s" % error) sys.exit(1) @@ -89,14 +88,13 @@ from .server import server Server = server() babel = Babel() -log = logger.create() def create_app(): app.wsgi_app = ReverseProxied(app.wsgi_app) cache_buster.init_cache_busting(app) - log.info('Starting Calibre Web...') + logger.info('Starting Calibre Web...') Principal(app) lm.init_app(app) app.secret_key = os.getenv('SECRET_KEY', 'A0Zr98j/3yX R~XHH!jmN]LWX/,?RT') @@ -120,7 +118,7 @@ def get_locale(): try: preferred.append(str(LC.parse(x.replace('-', '_')))) except (UnknownLocaleError, ValueError) as e: - log.warning('Could not parse locale "%s": %s', x, e) + logger.warning('Could not parse locale "%s": %s', x, e) preferred.append('en') return negotiate_locale(preferred, translations) diff --git a/cps/admin.py b/cps/admin.py index 39eb8abb..a0b87177 100644 --- a/cps/admin.py +++ b/cps/admin.py @@ -76,7 +76,7 @@ except ImportError: feature_support['gdrive'] = gdrive_support admi = Blueprint('admin', __name__) -log = logger.create() +# log = logger.create() @admi.route("/admin") @@ -220,7 +220,7 @@ def view_configuration(): # stop Server Server.setRestartTyp(True) Server.stopServer() - log.info('Reboot required, restarting') + logger.info('Reboot required, restarting') readColumn = db.session.query(db.Custom_Columns)\ .filter(and_(db.Custom_Columns.datatype == 'bool',db.Custom_Columns.mark_for_delete == 0)).all() return render_title_template("config_view_edit.html", conf=config, readColumns=readColumn, @@ -464,6 +464,24 @@ def configuration_helper(origin): title=_(u"Basic Configuration"), page="config") content.config_logfile = to_save["config_logfile"] + content.config_access_log = 0 + if "config_access_log" in to_save and 
to_save["config_access_log"] == "on": + content.config_access_log = 1 + reboot_required = True + if "config_access_log" not in to_save and config.config_access_log: + reboot_required = True + + if content.config_access_logfile != to_save["config_access_logfile"]: + # check valid path, only path or file + if not logger.is_valid_logfile(to_save["config_access_logfile"]): + ub.session.commit() + flash(_(u'Access Logfile location is not valid, please enter correct path'), category="error") + return render_title_template("config_edit.html", config=config, origin=origin, + gdriveError=gdriveError, feature_support=feature_support, + title=_(u"Basic Configuration"), page="config") + content.config_access_logfile = to_save["config_access_logfile"] + reboot_required = True + # Rarfile Content configuration if "config_rarfile_location" in to_save and to_save['config_rarfile_location'] is not u"": check = check_unrar(to_save["config_rarfile_location"].strip()) @@ -500,7 +518,7 @@ def configuration_helper(origin): # stop Server Server.setRestartTyp(True) Server.stopServer() - log.info('Reboot required, restarting') + logger.info('Reboot required, restarting') if origin: success = True if is_gdrive_ready() and feature_support['gdrive'] is True: # and config.config_use_google_drive == True: diff --git a/cps/cache_buster.py b/cps/cache_buster.py index 02aa7187..b8389bbd 100644 --- a/cps/cache_buster.py +++ b/cps/cache_buster.py @@ -24,7 +24,7 @@ import hashlib from . import logger -log = logger.create() +# log = logger.create() def init_cache_busting(app): @@ -40,7 +40,7 @@ def init_cache_busting(app): hash_table = {} # map of file hashes - log.debug('Computing cache-busting values...') + logger.debug('Computing cache-busting values...') # compute file hashes for dirpath, __, filenames in os.walk(static_folder): for filename in filenames: @@ -53,7 +53,7 @@ def init_cache_busting(app): file_path = rooted_filename.replace(static_folder, "") file_path = file_path.replace("\\", "/") # Convert Windows path to web path hash_table[file_path] = file_hash - log.debug('Finished computing cache-busting values') + logger.debug('Finished computing cache-busting values') def bust_filename(filename): return hash_table.get(filename, "") diff --git a/cps/cli.py b/cps/cli.py index 5d304ad0..1bda9c93 100644 --- a/cps/cli.py +++ b/cps/cli.py @@ -24,6 +24,48 @@ import os import argparse from .constants import CONFIG_DIR as _CONFIG_DIR +from .constants import STABLE_VERSION as _STABLE_VERSION +from .constants import NIGHTLY_VERSION as _NIGHTLY_VERSION + +VALID_CHARACTERS = 'ABCDEFabcdef:0123456789' + +ipv6 = False + + +def version_info(): + if _NIGHTLY_VERSION[1].startswith('$Format'): + return "Calibre-Web version: %s - unkown git-clone" % _STABLE_VERSION['version'] + else: + return "Calibre-Web version: %s -%s" % (_STABLE_VERSION['version'],_NIGHTLY_VERSION[1]) + + +def validate_ip4(address): + address_list = address.split('.') + if len(address_list) != 4: + return False + for val in address_list: + if not val.isdigit(): + return False + i = int(val) + if i < 0 or i > 255: + return False + return True + + +def validate_ip6(address): + address_list = address.split(':') + return ( + len(address_list) == 8 + and all(len(current) <= 4 for current in address_list) + and all(current in VALID_CHARACTERS for current in address) + ) + + +def validate_ip(address): + if validate_ip4(address) or ipv6: + return address + print("IP address is invalid. 
Exiting") + sys.exit(1) parser = argparse.ArgumentParser(description='Calibre Web is a web app' @@ -34,12 +76,17 @@ parser.add_argument('-c', metavar='path', help='path and name to SSL certfile, e.g. /opt/test.cert, works only in combination with keyfile') parser.add_argument('-k', metavar='path', help='path and name to SSL keyfile, e.g. /opt/test.key, works only in combination with certfile') -parser.add_argument('-v', action='store_true', help='shows version number and exits Calibre-web') +parser.add_argument('-v', '--version', action='version', help='Shows version number and exits Calibre-web', + version=version_info()) +parser.add_argument('-i', metavar='ip-adress', help='Server IP-Adress to listen') +parser.add_argument('-s', metavar='user:pass', help='Sets specific username to new password') args = parser.parse_args() + settingspath = args.p or os.path.join(_CONFIG_DIR, "app.db") gdpath = args.g or os.path.join(_CONFIG_DIR, "gdrive.db") +# handle and check parameter for ssl encryption certfilepath = None keyfilepath = None if args.c: @@ -66,6 +113,12 @@ if (args.k and not args.c) or (not args.k and args.c): if args.k is "": keyfilepath = "" -if args.v: - print("Calibre-web version: 0.6.4") - sys.exit(1) +# handle and check ipadress argument +if args.i: + ipv6 = validate_ip6(args.i) + ipadress = validate_ip(args.i) +else: + ipadress = None + +# handle and check user password argument +user_password = args.s or None diff --git a/cps/comic.py b/cps/comic.py index 738b2a89..d5625cbb 100755 --- a/cps/comic.py +++ b/cps/comic.py @@ -24,14 +24,14 @@ from . import logger, isoLanguages from .constants import BookMeta -log = logger.create() +# log = logger.create() try: from comicapi.comicarchive import ComicArchive, MetaDataStyle use_comic_meta = True except ImportError as e: - log.warning('cannot import comicapi, extracting comic metadata will not work: %s', e) + logger.warning('cannot import comicapi, extracting comic metadata will not work: %s', e) import zipfile import tarfile use_comic_meta = False diff --git a/cps/constants.py b/cps/constants.py index 88ae7c13..aaf0cd21 100644 --- a/cps/constants.py +++ b/cps/constants.py @@ -111,6 +111,13 @@ BookMeta = namedtuple('BookMeta', 'file_path, extension, title, author, cover, d STABLE_VERSION = {'version': '0.6.4 Beta'} +NIGHTLY_VERSION = {} +NIGHTLY_VERSION[0] = '$Format:%H$' +NIGHTLY_VERSION[1] = '$Format:%cI$' +# NIGHTLY_VERSION[0] = 'bb7d2c6273ae4560e83950d36d64533343623a57' +# NIGHTLY_VERSION[1] = '2018-09-09T10:13:08+02:00' + + # clean-up the module namespace del sys, os, namedtuple diff --git a/cps/editbooks.py b/cps/editbooks.py index e15011fb..f6f2a788 100644 --- a/cps/editbooks.py +++ b/cps/editbooks.py @@ -39,7 +39,7 @@ from .web import login_required_if_no_ano, render_title_template, edit_required, editbook = Blueprint('editbook', __name__) -log = logger.create() +# log = logger.create() # Modifies different Database objects, first check if elements have to be added to database, than check @@ -198,7 +198,7 @@ def delete_book(book_id, book_format): db.session.commit() else: # book not found - log.error('Book with id "%s" could not be deleted: not found', book_id) + logger.error('Book with id "%s" could not be deleted: not found', book_id) if book_format: return redirect(url_for('editbook.edit_book', book_id=book_id)) else: @@ -237,7 +237,7 @@ def render_edit_book(book_id): try: allowed_conversion_formats.remove(file.format.lower()) except Exception: - log.warning('%s already removed from list.', file.format.lower()) + 
logger.warning('%s already removed from list.', file.format.lower()) return render_title_template('book_edit.html', book=book, authors=author_names, cc=cc, title=_(u"edit metadata"), page="editbook", @@ -349,7 +349,7 @@ def upload_single_file(request, book, book_id): # Format entry already exists, no need to update the database if is_format: - log.warning('Book format %s already existing', file_ext.upper()) + logger.warning('Book format %s already existing', file_ext.upper()) else: db_format = db.Data(book_id, file_ext.upper(), file_size, file_name) db.session.add(db_format) @@ -492,7 +492,7 @@ def edit_book(book_id): res = list(language_table[get_locale()].keys())[invers_lang_table.index(lang)] input_l.append(res) except ValueError: - log.error('%s is not a valid language', lang) + logger.error('%s is not a valid language', lang) flash(_(u"%(langname)s is not a valid language", langname=lang), category="error") modify_database_object(input_l, book.languages, db.Languages, db.session, 'languages') @@ -531,7 +531,7 @@ def edit_book(book_id): flash(error, category="error") return render_edit_book(book_id) except Exception as e: - log.exception(e) + logger.exception(e) db.session.rollback() flash(_("Error editing book, please check logfile for details"), category="error") return redirect(url_for('web.show_book', book_id=book.id)) @@ -703,7 +703,7 @@ def convert_bookformat(book_id): flash(_(u"Source or destination format for conversion missing"), category="error") return redirect(request.environ["HTTP_REFERER"]) - log.info('converting: book id: %s from: %s to: %s', book_id, book_format_from, book_format_to) + logger.info('converting: book id: %s from: %s to: %s', book_id, book_format_from, book_format_to) rtn = helper.convert_book_format(book_id, config.config_calibre_dir, book_format_from.upper(), book_format_to.upper(), current_user.nickname) diff --git a/cps/gdrive.py b/cps/gdrive.py index 196b9dac..a604dc01 100644 --- a/cps/gdrive.py +++ b/cps/gdrive.py @@ -44,7 +44,7 @@ from .web import admin_required gdrive = Blueprint('gdrive', __name__) -log = logger.create() +# log = logger.create() current_milli_time = lambda: int(round(time() * 1000)) @@ -74,7 +74,7 @@ def google_drive_callback(): with open(gdriveutils.CREDENTIALS, 'w') as f: f.write(credentials.to_json()) except ValueError as error: - log.error(error) + logger.error(error) return redirect(url_for('admin.configuration')) @@ -131,7 +131,7 @@ def revoke_watch_gdrive(): @gdrive.route("/gdrive/watch/callback", methods=['GET', 'POST']) def on_received_watch_confirmation(): - log.debug('%r', request.headers) + logger.debug('%r', request.headers) if request.headers.get('X-Goog-Channel-Token') == gdrive_watch_callback_token \ and request.headers.get('X-Goog-Resource-State') == 'change' \ and request.data: @@ -139,26 +139,26 @@ def on_received_watch_confirmation(): data = request.data def updateMetaData(): - log.info('Change received from gdrive') - log.debug('%r', data) + logger.info('Change received from gdrive') + logger.debug('%r', data) try: j = json.loads(data) - log.info('Getting change details') + logger.info('Getting change details') response = gdriveutils.getChangeById(gdriveutils.Gdrive.Instance().drive, j['id']) - log.debug('%r', response) + logger.debug('%r', response) if response: dbpath = os.path.join(config.config_calibre_dir, "metadata.db") if not response['deleted'] and response['file']['title'] == 'metadata.db' and response['file']['md5Checksum'] != hashlib.md5(dbpath): tmpDir = tempfile.gettempdir() - log.info('Database 
file updated') + logger.info('Database file updated') copyfile(dbpath, os.path.join(tmpDir, "metadata.db_" + str(current_milli_time()))) - log.info('Backing up existing and downloading updated metadata.db') + logger.info('Backing up existing and downloading updated metadata.db') gdriveutils.downloadFile(None, "metadata.db", os.path.join(tmpDir, "tmp_metadata.db")) - log.info('Setting up new DB') + logger.info('Setting up new DB') # prevent error on windows, as os.rename does on exisiting files move(os.path.join(tmpDir, "tmp_metadata.db"), dbpath) db.setup_db() except Exception as e: - log.exception(e) + logger.exception(e) updateMetaData() return '' diff --git a/cps/gdriveutils.py b/cps/gdriveutils.py index 705966b2..a7036712 100644 --- a/cps/gdriveutils.py +++ b/cps/gdriveutils.py @@ -45,7 +45,7 @@ SETTINGS_YAML = os.path.join(_BASE_DIR, 'settings.yaml') CREDENTIALS = os.path.join(_BASE_DIR, 'gdrive_credentials') CLIENT_SECRETS = os.path.join(_BASE_DIR, 'client_secrets.json') -log = logger.create() +# log = logger.create() class Singleton: @@ -169,9 +169,9 @@ def getDrive(drive=None, gauth=None): try: gauth.Refresh() except RefreshError as e: - log.error("Google Drive error: %s", e) + logger.error("Google Drive error: %s", e) except Exception as e: - log.exception(e) + logger.exception(e) else: # Initialize the saved creds gauth.Authorize() @@ -181,7 +181,7 @@ def getDrive(drive=None, gauth=None): try: drive.auth.Refresh() except RefreshError as e: - log.error("Google Drive error: %s", e) + logger.error("Google Drive error: %s", e) return drive def listRootFolders(): @@ -218,7 +218,7 @@ def getEbooksFolderId(drive=None): try: gDriveId.gdrive_id = getEbooksFolder(drive)['id'] except Exception: - log.error('Error gDrive, root ID not found') + logger.error('Error gDrive, root ID not found') gDriveId.path = '/' session.merge(gDriveId) session.commit() @@ -458,10 +458,10 @@ def getChangeById (drive, change_id): change = drive.auth.service.changes().get(changeId=change_id).execute() return change except (errors.HttpError) as error: - log.error(error) + logger.error(error) return None except Exception as e: - log.error(e) + logger.error(e) return None @@ -531,6 +531,6 @@ def do_gdrive_download(df, headers): if resp.status == 206: yield content else: - log.warning('An error occurred: %s', resp) + logger.warning('An error occurred: %s', resp) return return Response(stream_with_context(stream()), headers=headers) diff --git a/cps/helper.py b/cps/helper.py index eb5374bc..09998ff2 100644 --- a/cps/helper.py +++ b/cps/helper.py @@ -75,7 +75,7 @@ from .worker import STAT_WAITING, STAT_FAIL, STAT_STARTED, STAT_FINISH_SUCCESS from .worker import TASK_EMAIL, TASK_CONVERT, TASK_UPLOAD, TASK_CONVERT_ANY -log = logger.create() +# log = logger.create() def update_download(book_id, user_id): @@ -92,7 +92,7 @@ def convert_book_format(book_id, calibrepath, old_book_format, new_book_format, data = db.session.query(db.Data).filter(db.Data.book == book.id).filter(db.Data.format == old_book_format).first() if not data: error_message = _(u"%(format)s format not found for book id: %(book)d", format=old_book_format, book=book_id) - log.error("convert_book_format: %s", error_message) + logger.error("convert_book_format: %s", error_message) return error_message if config.config_use_google_drive: df = gd.getFileFromEbooksFolder(book.path, data.name + "." 
+ old_book_format.lower()) @@ -186,7 +186,7 @@ def check_send_to_kindle(entry): 'text':_('Convert %(orig)s to %(format)s and send to Kindle',orig='Epub',format='Azw3')})''' return bookformats else: - log.error(u'Cannot find book entry %d', entry.id) + logger.error(u'Cannot find book entry %d', entry.id) return None @@ -272,7 +272,7 @@ def get_sorted_author(value): else: value2 = value except Exception as ex: - log.error("Sorting author %s failed: %s", value, ex) + logger.error("Sorting author %s failed: %s", value, ex) value2 = value return value2 @@ -289,12 +289,12 @@ def delete_book_file(book, calibrepath, book_format=None): else: if os.path.isdir(path): if len(next(os.walk(path))[1]): - log.error("Deleting book %s failed, path has subfolders: %s", book.id, book.path) + logger.error("Deleting book %s failed, path has subfolders: %s", book.id, book.path) return False shutil.rmtree(path, ignore_errors=True) return True else: - log.error("Deleting book %s failed, book path not valid: %s", book.id, book.path) + logger.error("Deleting book %s failed, book path not valid: %s", book.id, book.path) return False @@ -317,7 +317,7 @@ def update_dir_structure_file(book_id, calibrepath, first_author): if not os.path.exists(new_title_path): os.renames(path, new_title_path) else: - log.info("Copying title: %s into existing: %s", path, new_title_path) + logger.info("Copying title: %s into existing: %s", path, new_title_path) for dir_name, __, file_list in os.walk(path): for file in file_list: os.renames(os.path.join(dir_name, file), @@ -325,8 +325,8 @@ def update_dir_structure_file(book_id, calibrepath, first_author): path = new_title_path localbook.path = localbook.path.split('/')[0] + '/' + new_titledir except OSError as ex: - log.error("Rename title from: %s to %s: %s", path, new_title_path, ex) - log.debug(ex, exc_info=True) + logger.error("Rename title from: %s to %s: %s", path, new_title_path, ex) + logger.debug(ex, exc_info=True) return _("Rename title from: '%(src)s' to '%(dest)s' failed with error: %(error)s", src=path, dest=new_title_path, error=str(ex)) if authordir != new_authordir: @@ -335,8 +335,8 @@ def update_dir_structure_file(book_id, calibrepath, first_author): os.renames(path, new_author_path) localbook.path = new_authordir + '/' + localbook.path.split('/')[1] except OSError as ex: - log.error("Rename author from: %s to %s: %s", path, new_author_path, ex) - log.debug(ex, exc_info=True) + logger.error("Rename author from: %s to %s: %s", path, new_author_path, ex) + logger.debug(ex, exc_info=True) return _("Rename author from: '%(src)s' to '%(dest)s' failed with error: %(error)s", src=path, dest=new_author_path, error=str(ex)) # Rename all files from old names to new names @@ -349,8 +349,8 @@ def update_dir_structure_file(book_id, calibrepath, first_author): os.path.join(path_name, new_name + '.' 
+ file_format.format.lower())) file_format.name = new_name except OSError as ex: - log.error("Rename file in path %s to %s: %s", path, new_name, ex) - log.debug(ex, exc_info=True) + logger.error("Rename file in path %s to %s: %s", path, new_name, ex) + logger.debug(ex, exc_info=True) return _("Rename file in path '%(src)s' to '%(dest)s' failed with error: %(error)s", src=path, dest=new_name, error=str(ex)) return False @@ -454,10 +454,10 @@ def get_book_cover(book_id): if path: return redirect(path) else: - log.error('%s/cover.jpg not found on Google Drive', book.path) + logger.error('%s/cover.jpg not found on Google Drive', book.path) return send_from_directory(_STATIC_DIR, "generic_cover.jpg") except Exception as e: - log.exception(e) + logger.exception(e) # traceback.print_exc() return send_from_directory(_STATIC_DIR,"generic_cover.jpg") else: @@ -487,15 +487,15 @@ def save_cover_from_filestorage(filepath, saved_filename, img): try: os.makedirs(filepath) except OSError: - log.error(u"Failed to create path for cover") + logger.error(u"Failed to create path for cover") return False try: img.save(os.path.join(filepath, saved_filename)) except IOError: - log.error(u"Cover-file is not a valid image file") + logger.error(u"Cover-file is not a valid image file") return False except OSError: - log.error(u"Failed to store cover-file") + logger.error(u"Failed to store cover-file") return False return True @@ -506,7 +506,7 @@ def save_cover(img, book_path): if use_PIL: if content_type not in ('image/jpeg', 'image/png', 'image/webp'): - log.error("Only jpg/jpeg/png/webp files are supported as coverfile") + logger.error("Only jpg/jpeg/png/webp files are supported as coverfile") return False # convert to jpg because calibre only supports jpg if content_type in ('image/png', 'image/webp'): @@ -520,7 +520,7 @@ def save_cover(img, book_path): img._content = tmp_bytesio.getvalue() else: if content_type not in ('image/jpeg'): - log.error("Only jpg/jpeg files are supported as coverfile") + logger.error("Only jpg/jpeg files are supported as coverfile") return False if ub.config.config_use_google_drive: @@ -528,7 +528,7 @@ def save_cover(img, book_path): if save_cover_from_filestorage(tmpDir, "uploaded_cover.jpg", img) is True: gd.uploadFileToEbooksFolder(os.path.join(book_path, 'cover.jpg'), os.path.join(tmpDir, "uploaded_cover.jpg")) - log.info("Cover is saved on Google Drive") + logger.info("Cover is saved on Google Drive") return True else: return False @@ -541,7 +541,7 @@ def do_download_file(book, book_format, data, headers): if config.config_use_google_drive: startTime = time.time() df = gd.getFileFromEbooksFolder(book.path, data.name + "." + book_format) - log.debug('%s', time.time() - startTime) + logger.debug('%s', time.time() - startTime) if df: return gd.do_gdrive_download(df, headers) else: @@ -550,7 +550,7 @@ def do_download_file(book, book_format, data, headers): filename = os.path.join(config.config_calibre_dir, book.path) if not os.path.isfile(os.path.join(filename, data.name + "." + book_format)): # ToDo: improve error handling - log.error('File not found: %s', os.path.join(filename, data.name + "." + book_format)) + logger.error('File not found: %s', os.path.join(filename, data.name + "." + book_format)) response = make_response(send_from_directory(filename, data.name + "." 
+ book_format)) response.headers = headers return response @@ -575,7 +575,7 @@ def check_unrar(unrarLocation): version = value.group(1) except OSError as e: error = True - log.exception(e) + logger.exception(e) version =_(u'Error excecuting UnRar') else: version = _(u'Unrar binary file not found') diff --git a/cps/jinjia.py b/cps/jinjia.py index 37f9ce30..04f8c2f0 100644 --- a/cps/jinjia.py +++ b/cps/jinjia.py @@ -37,7 +37,7 @@ from . import logger jinjia = Blueprint('jinjia', __name__) -log = logger.create() +# log = logger.create() # pagination links in jinja @@ -85,7 +85,7 @@ def formatdate_filter(val): formatdate = datetime.datetime.strptime(conformed_timestamp[:15], "%Y%m%d %H%M%S") return format_date(formatdate, format='medium', locale=get_locale()) except AttributeError as e: - log.error('Babel error: %s, Current user locale: %s, Current User: %s', e, current_user.locale, current_user.nickname) + logger.error('Babel error: %s, Current user locale: %s, Current User: %s', e, current_user.locale, current_user.nickname) return formatdate @jinjia.app_template_filter('formatdateinput') diff --git a/cps/logger.py b/cps/logger.py index 1b7bb3ec..4bb95bce 100644 --- a/cps/logger.py +++ b/cps/logger.py @@ -25,23 +25,50 @@ from logging.handlers import RotatingFileHandler from .constants import BASE_DIR as _BASE_DIR - +ACCESS_FORMATTER = Formatter("%(message)s") FORMATTER = Formatter("[%(asctime)s] %(levelname)5s {%(name)s:%(lineno)d} %(message)s") DEFAULT_LOG_LEVEL = logging.INFO DEFAULT_LOG_FILE = os.path.join(_BASE_DIR, "calibre-web.log") +DEFAULT_ACCESS_LOG = os.path.join(_BASE_DIR, "access.log") LOG_TO_STDERR = '/dev/stderr' - +DEFAULT_ACCESS_LEVEL= logging.INFO logging.addLevelName(logging.WARNING, "WARN") logging.addLevelName(logging.CRITICAL, "CRIT") +def info(msg, *args, **kwargs): + create(2).info(msg, *args, **kwargs) + + +def warning(msg, *args, **kwargs): + create(2).warning(msg, *args, **kwargs) + + +def error(msg, *args, **kwargs): + create(2).error(msg, *args, **kwargs) + + +def critical(msg, *args, **kwargs): + create(2).critical(msg, *args, **kwargs) + + +def exception(msg, *args, **kwargs): + create(2).exception(msg, *args, **kwargs) + + +def debug(msg, *args, **kwargs): + create(2).debug(msg, *args, **kwargs) + + def get(name=None): - return logging.getLogger(name) + val = logging.getLogger("general") + val.name = name + return val -def create(): - parent_frame = inspect.stack(0)[1] +def create(ini=1): + parent_frame = inspect.stack(0)[ini] if hasattr(parent_frame, 'frame'): parent_frame = parent_frame.frame else: @@ -50,8 +77,11 @@ def create(): return get(parent_module.__name__) -def is_debug_enabled(): - return logging.root.level <= logging.DEBUG +def is_debug_enabled(logger): + return logging.getLogger(logger).level <= logging.DEBUG + +def is_info_enabled(logger): + return logging.getLogger(logger).level <= logging.INFO def get_level_name(level): @@ -67,17 +97,23 @@ def is_valid_logfile(file_path): return (not log_dir) or os.path.isdir(log_dir) -def setup(log_file, log_level=None): +def setup(log_file, logger, log_level=None): + if logger == "general": + formatter = FORMATTER + default_file = DEFAULT_LOG_FILE + else: + formatter = ACCESS_FORMATTER + default_file = DEFAULT_ACCESS_LOG if log_file: if not os.path.dirname(log_file): log_file = os.path.join(_BASE_DIR, log_file) log_file = os.path.abspath(log_file) else: # log_file = LOG_TO_STDERR - log_file = DEFAULT_LOG_FILE + log_file = default_file # print ('%r -- %r' % (log_level, log_file)) - r = logging.root + r = 
logging.getLogger(logger) r.setLevel(log_level or DEFAULT_LOG_LEVEL) previous_handler = r.handlers[0] if r.handlers else None @@ -96,10 +132,10 @@ def setup(log_file, log_level=None): try: file_handler = RotatingFileHandler(log_file, maxBytes=50000, backupCount=2) except IOError: - if log_file == DEFAULT_LOG_FILE: + if log_file == default_file: raise - file_handler = RotatingFileHandler(DEFAULT_LOG_FILE, maxBytes=50000, backupCount=2) - file_handler.setFormatter(FORMATTER) + file_handler = RotatingFileHandler(default_file, maxBytes=50000, backupCount=2) + file_handler.setFormatter(formatter) for h in r.handlers: r.removeHandler(h) @@ -122,4 +158,4 @@ class StderrLogger(object): else: self.buffer += message except Exception: - self.logger.debug("Logging Error") + self.log.debug("Logging Error") diff --git a/cps/oauth_bb.py b/cps/oauth_bb.py index 3a48798a..d9db3e0e 100644 --- a/cps/oauth_bb.py +++ b/cps/oauth_bb.py @@ -42,7 +42,7 @@ from .web import login_required oauth_check = {} oauth = Blueprint('oauth', __name__) -log = logger.create() +# log = logger.create() def github_oauth_required(f): @@ -105,7 +105,7 @@ def register_user_with_oauth(user=None): try: ub.session.commit() except Exception as e: - log.exception(e) + logger.exception(e) ub.session.rollback() @@ -199,7 +199,7 @@ if ub.oauth_support: ub.session.add(oauth) ub.session.commit() except Exception as e: - log.exception(e) + logger.exception(e) ub.session.rollback() # Disable Flask-Dance's default behavior for saving the OAuth token @@ -225,7 +225,7 @@ if ub.oauth_support: ub.session.add(oauth) ub.session.commit() except Exception as e: - log.exception(e) + logger.exception(e) ub.session.rollback() return redirect(url_for('web.login')) #if config.config_public_reg: @@ -268,11 +268,11 @@ if ub.oauth_support: logout_oauth_user() flash(_(u"Unlink to %(oauth)s success.", oauth=oauth_check[provider]), category="success") except Exception as e: - log.exception(e) + logger.exception(e) ub.session.rollback() flash(_(u"Unlink to %(oauth)s failed.", oauth=oauth_check[provider]), category="error") except NoResultFound: - log.warning("oauth %s for user %d not fount", provider, current_user.id) + logger.warning("oauth %s for user %d not fount", provider, current_user.id) flash(_(u"Not linked to %(oauth)s.", oauth=oauth_check[provider]), category="error") return redirect(url_for('web.profile')) diff --git a/cps/opds.py b/cps/opds.py index dd7755bb..dc5aea02 100644 --- a/cps/opds.py +++ b/cps/opds.py @@ -108,8 +108,8 @@ def feed_best_rated(): @requires_basic_auth_if_no_ano def feed_hot(): off = request.args.get("offset") or 0 - all_books = ub.session.query(ub.Downloads, ub.func.count(ub.Downloads.book_id)).order_by( - ub.func.count(ub.Downloads.book_id).desc()).group_by(ub.Downloads.book_id) + all_books = ub.session.query(ub.Downloads, func.count(ub.Downloads.book_id)).order_by( + func.count(ub.Downloads.book_id).desc()).group_by(ub.Downloads.book_id) hot_books = all_books.offset(off).limit(config.config_books_per_page) entries = list() for book in hot_books: diff --git a/cps/server.py b/cps/server.py index 3dd03257..a8d4cc16 100644 --- a/cps/server.py +++ b/cps/server.py @@ -22,6 +22,7 @@ import sys import os import signal import socket +import logging try: from gevent.pywsgi import WSGIServer @@ -38,14 +39,12 @@ except ImportError: from . 
import logger, config, global_WorkerThread -log = logger.create() - - class server: wsgiserver = None restart = False app = None + access_logger = None def __init__(self): signal.signal(signal.SIGINT, self.killServer) @@ -54,6 +53,11 @@ class server: def init_app(self, application): self.app = application self.port = config.config_port + self.listening = config.get_config_ipaddress(readable=True) + ":" + str(self.port) + if config.config_access_log: + self.access_logger = logging.getLogger("access") + else: + self.access_logger = None self.ssl_args = None certfile_path = config.get_config_certfile() @@ -63,54 +67,64 @@ class server: self.ssl_args = {"certfile": certfile_path, "keyfile": keyfile_path} else: - log.warning('The specified paths for the ssl certificate file and/or key file seem to be broken. Ignoring ssl.') - log.warning('Cert path: %s', certfile_path) - log.warning('Key path: %s', keyfile_path) + logger.warning('The specified paths for the ssl certificate file and/or key file seem to be broken. Ignoring ssl.') + logger.warning('Cert path: %s', certfile_path) + logger.warning('Key path: %s', keyfile_path) def _make_gevent_socket(self): + if config.get_config_ipaddress(): + return (config.get_config_ipaddress(), self.port) if os.name == 'nt': return ('0.0.0.0', self.port) try: s = WSGIServer.get_listener(('', self.port), family=socket.AF_INET6) except socket.error as ex: - log.error('%s', ex) - log.warning('Unable to listen on \'\', trying on IPv4 only...') + logger.error('%s', ex) + logger.warning('Unable to listen on \'\', trying on IPv4 only...') s = WSGIServer.get_listener(('', self.port), family=socket.AF_INET) - log.debug("%r %r", s._sock, s._sock.getsockname()) + logger.debug("%r %r", s._sock, s._sock.getsockname()) return s def start_gevent(self): ssl_args = self.ssl_args or {} - log.info('Starting Gevent server') + logger.info('Starting Gevent server on %s', self.listening) try: sock = self._make_gevent_socket() - self.wsgiserver = WSGIServer(sock, self.app, spawn=Pool(), **ssl_args) + self.wsgiserver = WSGIServer(sock, self.app, log=self.access_logger, spawn=Pool(), **ssl_args) self.wsgiserver.serve_forever() - except (OSError, socket.error) as e: - log.info("Error starting server: %s", e.strerror) - print("Error starting server: %s" % e.strerror) - global_WorkerThread.stop() - sys.exit(1) + except socket.error: + try: + logger.info('Unable to listen on "", trying on "0.0.0.0" only...') + self.wsgiserver = WSGIServer(('0.0.0.0', config.config_port), self.app, spawn=Pool(), **ssl_args) + self.wsgiserver.serve_forever() + except (OSError, socket.error) as e: + logger.info("Error starting server: %s", e.strerror) + print("Error starting server: %s" % e.strerror) + global_WorkerThread.stop() + sys.exit(1) except Exception: - log.exception("Unknown error while starting gevent") + logger.exception("Unknown error while starting gevent") + sys.exit(0) def start_tornado(self): - log.info('Starting Tornado server') + logger.info('Starting Tornado server on %s', self.listening) try: # Max Buffersize set to 200MB http_server = HTTPServer(WSGIContainer(self.app), max_buffer_size = 209700000, ssl_options=self.ssl_args) - http_server.listen(self.port) + address = config.get_config_ipaddress() + http_server.listen(self.port, address) + # self.access_log = logging.getLogger("tornado.access") self.wsgiserver=IOLoop.instance() self.wsgiserver.start() # wait for stop signal self.wsgiserver.close(True) except socket.error as err: - log.exception("Error starting tornado server") + 
logger.exception("Error starting tornado server") print("Error starting server: %s" % err.strerror) global_WorkerThread.stop() sys.exit(1) @@ -123,7 +137,7 @@ class server: self.start_tornado() if self.restart is True: - log.info("Performing restart of Calibre-Web") + logger.info("Performing restart of Calibre-Web") global_WorkerThread.stop() if os.name == 'nt': arguments = ["\"" + sys.executable + "\""] @@ -133,7 +147,7 @@ class server: else: os.execl(sys.executable, sys.executable, *sys.argv) else: - log.info("Performing shutdown of Calibre-Web") + logger.info("Performing shutdown of Calibre-Web") global_WorkerThread.stop() sys.exit(0) diff --git a/cps/shelf.py b/cps/shelf.py index a34dbfed..0c4cdaa0 100644 --- a/cps/shelf.py +++ b/cps/shelf.py @@ -33,7 +33,7 @@ from .web import render_title_template shelf = Blueprint('shelf', __name__) -log = logger.create() +# log = logger.create() @shelf.route("/shelf/add//") @@ -41,14 +41,14 @@ log = logger.create() def add_to_shelf(shelf_id, book_id): shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.id == shelf_id).first() if shelf is None: - log.error("Invalid shelf specified: %s", shelf_id) + logger.error("Invalid shelf specified: %s", shelf_id) if not request.is_xhr: flash(_(u"Invalid shelf specified"), category="error") return redirect(url_for('web.index')) return "Invalid shelf specified", 400 if not shelf.is_public and not shelf.user_id == int(current_user.id): - log.error("User %s not allowed to add a book to %s", current_user, shelf) + logger.error("User %s not allowed to add a book to %s", current_user, shelf) if not request.is_xhr: flash(_(u"Sorry you are not allowed to add a book to the the shelf: %(shelfname)s", shelfname=shelf.name), category="error") @@ -56,7 +56,7 @@ def add_to_shelf(shelf_id, book_id): return "Sorry you are not allowed to add a book to the the shelf: %s" % shelf.name, 403 if shelf.is_public and not current_user.role_edit_shelfs(): - log.info("User %s not allowed to edit public shelves", current_user) + logger.info("User %s not allowed to edit public shelves", current_user) if not request.is_xhr: flash(_(u"You are not allowed to edit public shelves"), category="error") return redirect(url_for('web.index')) @@ -65,7 +65,7 @@ def add_to_shelf(shelf_id, book_id): book_in_shelf = ub.session.query(ub.BookShelf).filter(ub.BookShelf.shelf == shelf_id, ub.BookShelf.book_id == book_id).first() if book_in_shelf: - log.error("Book %s is already part of %s", book_id, shelf) + logger.error("Book %s is already part of %s", book_id, shelf) if not request.is_xhr: flash(_(u"Book is already part of the shelf: %(shelfname)s", shelfname=shelf.name), category="error") return redirect(url_for('web.index')) @@ -94,17 +94,17 @@ def add_to_shelf(shelf_id, book_id): def search_to_shelf(shelf_id): shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.id == shelf_id).first() if shelf is None: - log.error("Invalid shelf specified: %s", shelf_id) + logger.error("Invalid shelf specified: %s", shelf_id) flash(_(u"Invalid shelf specified"), category="error") return redirect(url_for('web.index')) if not shelf.is_public and not shelf.user_id == int(current_user.id): - log.error("User %s not allowed to add a book to %s", current_user, shelf) + logger.error("User %s not allowed to add a book to %s", current_user, shelf) flash(_(u"You are not allowed to add a book to the the shelf: %(name)s", name=shelf.name), category="error") return redirect(url_for('web.index')) if shelf.is_public and not current_user.role_edit_shelfs(): - log.error("User %s not allowed 
to edit public shelves", current_user) + logger.error("User %s not allowed to edit public shelves", current_user) flash(_(u"User is not allowed to edit public shelves"), category="error") return redirect(url_for('web.index')) @@ -122,7 +122,7 @@ def search_to_shelf(shelf_id): books_for_shelf = searched_ids[current_user.id] if not books_for_shelf: - log.error("Books are already part of %s", shelf) + logger.error("Books are already part of %s", shelf) flash(_(u"Books are already part of the shelf: %(name)s", name=shelf.name), category="error") return redirect(url_for('web.index')) @@ -148,7 +148,7 @@ def search_to_shelf(shelf_id): def remove_from_shelf(shelf_id, book_id): shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.id == shelf_id).first() if shelf is None: - log.error("Invalid shelf specified: %s", shelf_id) + logger.error("Invalid shelf specified: %s", shelf_id) if not request.is_xhr: return redirect(url_for('web.index')) return "Invalid shelf specified", 400 @@ -167,7 +167,7 @@ def remove_from_shelf(shelf_id, book_id): ub.BookShelf.book_id == book_id).first() if book_shelf is None: - log.error("Book %s already removed from %s", book_id, shelf) + logger.error("Book %s already removed from %s", book_id, shelf) if not request.is_xhr: return redirect(url_for('web.index')) return "Book already removed from shelf", 410 @@ -180,7 +180,7 @@ def remove_from_shelf(shelf_id, book_id): return redirect(request.environ["HTTP_REFERER"]) return "", 204 else: - log.error("User %s not allowed to remove a book from %s", current_user, shelf) + logger.error("User %s not allowed to remove a book from %s", current_user, shelf) if not request.is_xhr: flash(_(u"Sorry you are not allowed to remove a book from this shelf: %(sname)s", sname=shelf.name), category="error") @@ -262,7 +262,7 @@ def delete_shelf(shelf_id): if deleted: ub.session.query(ub.BookShelf).filter(ub.BookShelf.shelf == shelf_id).delete() ub.session.commit() - log.info("successfully deleted %s", cur_shelf) + logger.info("successfully deleted %s", cur_shelf) return redirect(url_for('web.index')) # @shelf.route("/shelfdown/") @@ -289,7 +289,7 @@ def show_shelf(shelf_type, shelf_id): if cur_book: result.append(cur_book) else: - log.info('Not existing book %s in %s deleted', book.book_id, shelf) + logger.info('Not existing book %s in %s deleted', book.book_id, shelf) ub.session.query(ub.BookShelf).filter(ub.BookShelf.book_id == book.book_id).delete() ub.session.commit() return render_title_template(page, entries=result, title=_(u"Shelf: '%(name)s'", name=shelf.name), diff --git a/cps/templates/author.html b/cps/templates/author.html index c3dad34c..14883ef7 100644 --- a/cps/templates/author.html +++ b/cps/templates/author.html @@ -23,16 +23,16 @@

{{_("In Library")}}

{% endif %} diff --git a/cps/templates/index.html b/cps/templates/index.html index 0faed5fd..b65beccb 100755 --- a/cps/templates/index.html +++ b/cps/templates/index.html @@ -54,15 +54,15 @@

{{_(title)}}

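For orientation before the cps/ub.py hunk: the single-address listening feature introduced by this patch in cps/cli.py, cps/ub.py and cps/server.py reduces to the standalone sketch below. It is illustrative only and not part of the diff; validate_ip4 follows the patch's own function, while gevent_bind_address is a hypothetical name for the logic inside _make_gevent_socket().

    def validate_ip4(address):
        # Accept only dotted-quad IPv4 strings such as "192.168.0.10".
        parts = address.split('.')
        return len(parts) == 4 and all(p.isdigit() and 0 <= int(p) <= 255 for p in parts)


    def gevent_bind_address(configured_ip, port):
        # Mirror _make_gevent_socket(): bind the address passed via -i when it
        # validates, otherwise fall back to listening on all interfaces.
        if configured_ip and validate_ip4(configured_ip):
            return (configured_ip, port)  # gevent's WSGIServer accepts a (host, port) tuple
        return ('0.0.0.0', port)
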
diff --git a/cps/ub.py b/cps/ub.py index b8d44375..502b7893 100644 --- a/cps/ub.py +++ b/cps/ub.py @@ -39,6 +39,7 @@ from sqlalchemy import String, Integer, SmallInteger, Boolean, DateTime from sqlalchemy.orm import relationship, sessionmaker from sqlalchemy.ext.declarative import declarative_base from werkzeug.security import generate_password_hash +import logging try: import ldap @@ -330,6 +331,7 @@ class Settings(Base): config_read_column = Column(Integer, default=0) config_title_regex = Column(String, default=u'^(A|The|An|Der|Die|Das|Den|Ein|Eine|Einen|Dem|Des|Einem|Eines)\s+') config_log_level = Column(SmallInteger, default=logger.DEFAULT_LOG_LEVEL) + config_access_log = Column(SmallInteger, default=0) config_uploading = Column(SmallInteger, default=0) config_anonbrowse = Column(SmallInteger, default=0) config_public_reg = Column(SmallInteger, default=0) @@ -355,6 +357,7 @@ class Settings(Base): config_google_oauth_client_secret = Column(String) config_mature_content_tags = Column(String) config_logfile = Column(String) + config_access_logfile = Column(String) config_ebookconverter = Column(Integer, default=0) config_converterpath = Column(String) config_calibre = Column(String) @@ -404,6 +407,7 @@ class Config: self.config_title_regex = data.config_title_regex self.config_read_column = data.config_read_column self.config_log_level = data.config_log_level + self.config_access_log = data.config_access_log self.config_uploading = data.config_uploading self.config_anonbrowse = data.config_anonbrowse self.config_public_reg = data.config_public_reg @@ -438,10 +442,13 @@ class Config: self.config_google_oauth_client_secret = data.config_google_oauth_client_secret self.config_mature_content_tags = data.config_mature_content_tags or u'' self.config_logfile = data.config_logfile or u'' + self.config_access_logfile = data.config_access_logfile or u'' self.config_rarfile_location = data.config_rarfile_location self.config_theme = data.config_theme self.config_updatechannel = data.config_updatechannel - logger.setup(self.config_logfile, self.config_log_level) + logger.setup(self.config_logfile, "general", self.config_log_level) + if self.config_access_log: + logger.setup("access.log", "access", logger.DEFAULT_ACCESS_LEVEL) @property def get_update_channel(self): @@ -465,6 +472,21 @@ class Config: else: return self.config_keyfile + def get_config_ipaddress(self, readable=False): + if not readable: + if cli.ipadress: + return cli.ipadress + else: + return "" + else: + answer="0.0.0.0" + if cli.ipadress: + if cli.ipv6: + answer = "["+cli.ipadress+"]" + else: + answer = cli.ipadress + return answer + def _has_role(self, role_flag): return constants.has_flag(self.config_default_role, role_flag) @@ -588,8 +610,10 @@ def migrate_Database(): conn.execute("UPDATE user SET 'sidebar_view' = (random_books* :side_random + language_books * :side_lang " "+ series_books * :side_series + category_books * :side_category + hot_books * " ":side_hot + :side_autor + :detail_random)" - ,{'side_random': constants.SIDEBAR_RANDOM, 'side_lang': constants.SIDEBAR_LANGUAGE, 'side_series': constants.SIDEBAR_SERIES, - 'side_category': constants.SIDEBAR_CATEGORY, 'side_hot': constants.SIDEBAR_HOT, 'side_autor': constants.SIDEBAR_AUTHOR, + ,{'side_random': constants.SIDEBAR_RANDOM, 'side_lang': constants.SIDEBAR_LANGUAGE, + 'side_series': constants.SIDEBAR_SERIES, + 'side_category': constants.SIDEBAR_CATEGORY, 'side_hot': constants.SIDEBAR_HOT, + 'side_autor': constants.SIDEBAR_AUTHOR, 'detail_random': constants.DETAIL_RANDOM}) 
session.commit() try: @@ -672,6 +696,13 @@ def migrate_Database(): conn = engine.connect() conn.execute("ALTER TABLE Settings ADD column `config_updatechannel` INTEGER DEFAULT 0") session.commit() + try: + session.query(exists().where(Settings.config_access_log)).scalar() + except exc.OperationalError: # Database is not compatible, some rows are missing + conn = engine.connect() + conn.execute("ALTER TABLE Settings ADD column `config_access_log` INTEGER DEFAULT 0") + conn.execute("ALTER TABLE Settings ADD column `config_access_logfile` String DEFAULT ''") + session.commit() # Remove login capability of user Guest conn = engine.connect() diff --git a/cps/updater.py b/cps/updater.py index 3b679a51..2c524c48 100644 --- a/cps/updater.py +++ b/cps/updater.py @@ -36,11 +36,10 @@ from flask_babel import gettext as _ from . import constants, logger, config, get_locale, Server -log = logger.create() +# log = logger.create() _REPOSITORY_API_URL = 'https://api.github.com/repos/janeczku/calibre-web' - def is_sha1(sha1): if len(sha1) != 40: return False @@ -73,59 +72,59 @@ class Updater(threading.Thread): def run(self): try: self.status = 1 - log.debug(u'Download update file') + logger.debug(u'Download update file') headers = {'Accept': 'application/vnd.github.v3+json'} r = requests.get(self._get_request_path(), stream=True, headers=headers) r.raise_for_status() self.status = 2 - log.debug(u'Opening zipfile') + logger.debug(u'Opening zipfile') z = zipfile.ZipFile(BytesIO(r.content)) self.status = 3 - log.debug(u'Extracting zipfile') + logger.debug(u'Extracting zipfile') tmp_dir = gettempdir() z.extractall(tmp_dir) foldername = os.path.join(tmp_dir, z.namelist()[0])[:-1] if not os.path.isdir(foldername): self.status = 11 - log.info(u'Extracted contents of zipfile not found in temp folder') + logger.error(u'Extracted contents of zipfile not found in temp folder') return self.status = 4 - log.debug(u'Replacing files') + logger.debug(u'Replacing files') self.update_source(foldername, constants.BASE_DIR) self.status = 6 - log.debug(u'Preparing restart of server') + logger.debug(u'Preparing restart of server') time.sleep(2) Server.setRestartTyp(True) Server.stopServer() self.status = 7 time.sleep(2) except requests.exceptions.HTTPError as ex: - log.info(u'HTTP Error %s', ex) + logger.info(u'HTTP Error %s', ex) self.status = 8 except requests.exceptions.ConnectionError: - log.info(u'Connection error') + logger.info(u'Connection error') self.status = 9 except requests.exceptions.Timeout: - log.info(u'Timeout while establishing connection') + logger.info(u'Timeout while establishing connection') self.status = 10 except requests.exceptions.RequestException: self.status = 11 - log.info(u'General error') + logger.info(u'General error') def get_update_status(self): return self.status @classmethod - def file_to_list(self, filelist): + def file_to_list(cls, filelist): return [x.strip() for x in open(filelist, 'r') if not x.startswith('#EXT')] @classmethod - def one_minus_two(self, one, two): + def one_minus_two(cls, one, two): return [x for x in one if x not in set(two)] @classmethod - def reduce_dirs(self, delete_files, new_list): + def reduce_dirs(cls, delete_files, new_list): new_delete = [] for filename in delete_files: parts = filename.split(os.sep) @@ -146,7 +145,7 @@ class Updater(threading.Thread): return list(set(new_delete)) @classmethod - def reduce_files(self, remove_items, exclude_items): + def reduce_files(cls, remove_items, exclude_items): rf = [] for item in remove_items: if not 
item.startswith(exclude_items): @@ -154,44 +153,41 @@ class Updater(threading.Thread): return rf @classmethod - def moveallfiles(self, root_src_dir, root_dst_dir): + def moveallfiles(cls, root_src_dir, root_dst_dir): change_permissions = True + new_permissions = os.stat(root_dst_dir) if sys.platform == "win32" or sys.platform == "darwin": change_permissions = False else: - log.debug('Update on OS-System : %s', sys.platform) - new_permissions = os.stat(root_dst_dir) - # print new_permissions + logger.debug('Update on OS-System : %s', sys.platform) for src_dir, __, files in os.walk(root_src_dir): dst_dir = src_dir.replace(root_src_dir, root_dst_dir, 1) if not os.path.exists(dst_dir): os.makedirs(dst_dir) - log.debug('Create-Dir: %s', dst_dir) + logger.debug('Create-Dir: %s', dst_dir) if change_permissions: # print('Permissions: User '+str(new_permissions.st_uid)+' Group '+str(new_permissions.st_uid)) os.chown(dst_dir, new_permissions.st_uid, new_permissions.st_gid) for file_ in files: src_file = os.path.join(src_dir, file_) dst_file = os.path.join(dst_dir, file_) + permission = os.stat(dst_file) if os.path.exists(dst_file): - if change_permissions: - permission = os.stat(dst_file) - log.debug('Remove file before copy: %s', dst_file) + logger.debug('Remove file before copy: %s', dst_file) os.remove(dst_file) else: if change_permissions: permission = new_permissions shutil.move(src_file, dst_dir) - log.debug('Move File %s to %s', src_file, dst_dir) + logger.debug('Move File %s to %s', src_file, dst_dir) if change_permissions: try: os.chown(dst_file, permission.st_uid, permission.st_gid) - except (Exception) as e: - # ex = sys.exc_info() + except OSError as e: old_permissions = os.stat(dst_file) - log.debug('Fail change permissions of %s. Before: %s:%s After %s:%s error: %s', - dst_file, old_permissions.st_uid, old_permissions.st_gid, - permission.st_uid, permission.st_gid, e) + logger.debug('Fail change permissions of %s. 
Before: %s:%s After %s:%s error: %s', + dst_file, old_permissions.st_uid, old_permissions.st_gid, + permission.st_uid, permission.st_gid, e) return def update_source(self, source, destination): @@ -199,7 +195,7 @@ class Updater(threading.Thread): old_list = list() exclude = ( os.sep + 'app.db', os.sep + 'calibre-web.log1', os.sep + 'calibre-web.log2', os.sep + 'gdrive.db', - os.sep + 'vendor', os.sep + 'calibre-web.log', os.sep + '.git', os.sep +'client_secrets.json', + os.sep + 'vendor', os.sep + 'calibre-web.log', os.sep + '.git', os.sep + 'client_secrets.json', os.sep + 'gdrive_credentials', os.sep + 'settings.yaml') for root, dirs, files in os.walk(destination, topdown=True): for name in files: @@ -225,37 +221,32 @@ class Updater(threading.Thread): for item in remove_items: item_path = os.path.join(destination, item[1:]) if os.path.isdir(item_path): - log.debug("Delete dir %s", item_path) + logger.debug("Delete dir %s", item_path) shutil.rmtree(item_path, ignore_errors=True) else: try: - log.debug("Delete file %s", item_path) + logger.debug("Delete file %s", item_path) # log_from_thread("Delete file " + item_path) os.remove(item_path) - except Exception: - log.debug("Could not remove: %s", item_path) + except OSError: + logger.debug("Could not remove: %s", item_path) shutil.rmtree(source, ignore_errors=True) @classmethod - def _nightly_version_info(self): - content = {} - content[0] = '$Format:%H$' - content[1] = '$Format:%cI$' - # content[0] = 'bb7d2c6273ae4560e83950d36d64533343623a57' - # content[1] = '2018-09-09T10:13:08+02:00' - if is_sha1(content[0]) and len(content[1]) > 0: - return {'version': content[0], 'datetime': content[1]} + def _nightly_version_info(cls): + if is_sha1(constants.NIGHTLY_VERSION[0]) and len(constants.NIGHTLY_VERSION[1]) > 0: + return {'version': constants.NIGHTLY_VERSION[0], 'datetime': constants.NIGHTLY_VERSION[1]} return False @classmethod - def _stable_version_info(self): + def _stable_version_info(cls): return constants.STABLE_VERSION # Current version def _nightly_available_updates(self, request_method): tz = datetime.timedelta(seconds=time.timezone if (time.localtime().tm_isdst == 0) else time.altzone) if request_method == "GET": repository_url = _REPOSITORY_API_URL - status, commit = self._load_remote_data(repository_url +'/git/refs/heads/master') + status, commit = self._load_remote_data(repository_url + '/git/refs/heads/master') parents = [] if status['message'] != '': return json.dumps(status) @@ -334,7 +325,7 @@ class Updater(threading.Thread): parent_commit_date, format='short', locale=get_locale()) parents.append([parent_commit_date, - parent_data['message'].replace('\r\n','

').replace('\n','

')]) + parent_data['message'].replace('\r\n', '

').replace('\n', '

')]) parent_commit = parent_data['parents'][0] remaining_parents_cnt -= 1 except Exception: @@ -398,7 +389,7 @@ class Updater(threading.Thread): if (minor_version_update == current_version[1] and patch_version_update > current_version[2]) or \ minor_version_update > current_version[1]: - parents.append([commit[i]['tag_name'],commit[i]['body'].replace('\r\n', '

')]) + parents.append([commit[i]['tag_name'], commit[i]['body'].replace('\r\n', '

')]) i -= 1 continue if major_version_update < current_version[0]: @@ -426,7 +417,7 @@ class Updater(threading.Thread): u'update to version: %(version)s', version=commit[i]['tag_name']), 'history': parents }) - self.updateFile = commit[i +1]['zipball_url'] + self.updateFile = commit[i+1]['zipball_url'] break if i == -1: status.update({ diff --git a/cps/uploader.py b/cps/uploader.py index 17816d0a..2b47ed1d 100644 --- a/cps/uploader.py +++ b/cps/uploader.py @@ -28,7 +28,7 @@ from . import logger, comic from .constants import BookMeta -log = logger.create() +# log = logger.create() try: @@ -42,7 +42,7 @@ try: from wand.exceptions import PolicyError use_generic_pdf_cover = False except (ImportError, RuntimeError) as e: - log.warning('cannot import Image, generating pdf covers for pdf uploads will not work: %s', e) + logger.warning('cannot import Image, generating pdf covers for pdf uploads will not work: %s', e) use_generic_pdf_cover = True try: @@ -50,21 +50,21 @@ try: from PyPDF2 import __version__ as PyPdfVersion use_pdf_meta = True except ImportError as e: - log.warning('cannot import PyPDF2, extracting pdf metadata will not work: %s', e) + logger.warning('cannot import PyPDF2, extracting pdf metadata will not work: %s', e) use_pdf_meta = False try: from . import epub use_epub_meta = True except ImportError as e: - log.warning('cannot import epub, extracting epub metadata will not work: %s', e) + logger.warning('cannot import epub, extracting epub metadata will not work: %s', e) use_epub_meta = False try: from . import fb2 use_fb2_meta = True except ImportError as e: - log.warning('cannot import fb2, extracting fb2 metadata will not work: %s', e) + logger.warning('cannot import fb2, extracting fb2 metadata will not work: %s', e) use_fb2_meta = False try: @@ -72,7 +72,7 @@ try: from PIL import __version__ as PILversion use_PIL = True except ImportError as e: - log.warning('cannot import Pillow, using png and webp images as cover will not work: %s', e) + logger.warning('cannot import Pillow, using png and webp images as cover will not work: %s', e) use_generic_pdf_cover = True use_PIL = False @@ -94,7 +94,7 @@ def process(tmp_file_path, original_file_name, original_file_extension): meta = comic.get_comic_info(tmp_file_path, original_file_name, original_file_extension) except Exception as ex: - log.warning('cannot parse metadata, using default: %s', ex) + logger.warning('cannot parse metadata, using default: %s', ex) if meta and meta.title.strip() and meta.author.strip(): return meta @@ -198,10 +198,10 @@ def pdf_preview(tmp_file_path, tmp_dir): img.save(filename=os.path.join(tmp_dir, cover_file_name)) return cover_file_name except PolicyError as ex: - log.warning('Pdf extraction forbidden by Imagemagick policy: %s', ex) + logger.warning('Pdf extraction forbidden by Imagemagick policy: %s', ex) return None except Exception as ex: - log.warning('Cannot extract cover image, using default: %s', ex) + logger.warning('Cannot extract cover image, using default: %s', ex) return None diff --git a/cps/web.py b/cps/web.py index 2f1f047b..ee123877 100644 --- a/cps/web.py +++ b/cps/web.py @@ -105,7 +105,7 @@ for ex in default_exceptions: web = Blueprint('web', __name__) -log = logger.create() +# log = logger.create() # ################################### Login logic and rights management ############################################### @@ -308,7 +308,7 @@ def toggle_read(book_id): db.session.add(new_cc) db.session.commit() except KeyError: - log.error(u"Custom Column No.%d is not exisiting in calibre 
database", config.config_read_column) + logger.error(u"Custom Column No.%d is not exisiting in calibre database", config.config_read_column) return "" ''' @@ -331,10 +331,10 @@ def get_comic_book(book_id, book_format, page): extract = lambda page: rf.read(names[page]) except: # rarfile not valid - log.error('Unrar binary not found, or unable to decompress file %s', cbr_file) + logger.error('Unrar binary not found, or unable to decompress file %s', cbr_file) return "", 204 else: - log.info('Unrar is not supported please install python rarfile extension') + logger.info('Unrar is not supported please install python rarfile extension') # no support means return nothing return "", 204 elif book_format in ("cbz", "zip"): @@ -346,7 +346,7 @@ def get_comic_book(book_id, book_format, page): names=sort(tf.getnames()) extract = lambda page: tf.extractfile(names[page]).read() else: - log.error('unsupported comic format') + logger.error('unsupported comic format') return "", 204 if sys.version_info.major >= 3: @@ -447,6 +447,7 @@ def index(page): @web.route('//', defaults={'page': 1, 'book_id': "1"}) +@web.route('///', defaults={'page': 1, 'book_id': "1"}) @web.route('///', defaults={'page': 1}) @web.route('////') @login_required_if_no_ano @@ -470,14 +471,14 @@ def books_list(data, sort, book_id, page): entries, random, pagination = fill_indexpage(page, db.Books, db.Books.ratings.any(db.Ratings.rating > 9), order) return render_title_template('index.html', random=random, entries=entries, pagination=pagination, - title=_(u"Best rated books"), page="rated") + id=book_id, title=_(u"Best rated books"), page="rated") else: abort(404) elif data == "discover": if current_user.check_visibility(constants.SIDEBAR_RANDOM): entries, __, pagination = fill_indexpage(page, db.Books, True, [func.randomblob(2)]) pagination = Pagination(1, config.config_books_per_page, config.config_books_per_page) - return render_title_template('discover.html', entries=entries, pagination=pagination, + return render_title_template('discover.html', entries=entries, pagination=pagination, id=book_id, title=_(u"Random Books"), page="discover") else: abort(404) @@ -513,8 +514,8 @@ def render_hot_books(page): else: random = false() off = int(int(config.config_books_per_page) * (page - 1)) - all_books = ub.session.query(ub.Downloads, ub.func.count(ub.Downloads.book_id)).order_by( - ub.func.count(ub.Downloads.book_id).desc()).group_by(ub.Downloads.book_id) + all_books = ub.session.query(ub.Downloads, func.count(ub.Downloads.book_id)).order_by( + func.count(ub.Downloads.book_id).desc()).group_by(ub.Downloads.book_id) hot_books = all_books.offset(off).limit(config.config_books_per_page) entries = list() for book in hot_books: @@ -534,11 +535,11 @@ def render_hot_books(page): abort(404) -# ToDo wrong order function def render_author_books(page, book_id, order): entries, __, pagination = fill_indexpage(page, db.Books, db.Books.authors.any(db.Authors.id == book_id), - [db.Series.name, db.Books.series_index, order[0]], db.books_series_link, db.Series) - if entries is None: + [order[0], db.Series.name, db.Books.series_index], + db.books_series_link, db.Series) + if entries is None or not len(entries): flash(_(u"Error opening eBook. 
File does not exist or file is not accessible:"), category="error") return redirect(url_for("web.index")) @@ -553,10 +554,11 @@ def render_author_books(page, book_id, order): other_books = get_unique_other_books(entries.all(), author_info.books) except Exception: # Skip goodreads, if site is down/inaccessible - log.error('Goodreads website is down/inaccessible') + logger.error('Goodreads website is down/inaccessible') - return render_title_template('author.html', entries=entries, pagination=pagination, - title=name, author=author_info, other_books=other_books, page="author") + return render_title_template('author.html', entries=entries, pagination=pagination, id=book_id, + title=_(u"Author: %(name)s", name=name), author=author_info, other_books=other_books, + page="author") def render_publisher_books(page, book_id, order): @@ -564,8 +566,9 @@ def render_publisher_books(page, book_id, order): if publisher: entries, random, pagination = fill_indexpage(page, db.Books, db.Books.publishers.any(db.Publishers.id == book_id), - [db.Books.series_index, order[0]]) - return render_title_template('index.html', random=random, entries=entries, pagination=pagination, + [db.Series.name, order[0], db.Books.series_index], + db.books_series_link, db.Series) + return render_title_template('index.html', random=random, entries=entries, pagination=pagination, id=book_id, title=_(u"Publisher: %(name)s", name=publisher.name), page="publisher") else: abort(404) @@ -934,7 +937,7 @@ def render_read_books(page, are_read, as_xml=False, order=None): .filter(db.cc_classes[config.config_read_column].value is True).all() readBookIds = [x.book for x in readBooks] except KeyError: - log.error("Custom Column No.%d is not existing in calibre database", config.config_read_column) + logger.error("Custom Column No.%d is not existing in calibre database", config.config_read_column) readBookIds = [] if are_read: @@ -977,7 +980,7 @@ def serve_book(book_id, book_format): book = db.session.query(db.Books).filter(db.Books.id == book_id).first() data = db.session.query(db.Data).filter(db.Data.book == book.id).filter(db.Data.format == book_format.upper())\ .first() - log.info('Serving book: %s', data.name) + logger.info('Serving book: %s', data.name) if config.config_use_google_drive: headers = Headers() try: @@ -1060,7 +1063,7 @@ def register(): return render_title_template('register.html', title=_(u"register"), page="register") else: flash(_(u"Your e-mail is not allowed to register"), category="error") - log.info('Registering failed for user "%s" e-mail adress: %s', to_save['nickname'], to_save["email"]) + logger.info('Registering failed for user "%s" e-mail adress: %s', to_save['nickname'], to_save["email"]) return render_title_template('register.html', title=_(u"register"), page="register") flash(_(u"Confirmation e-mail was send to your e-mail account."), category="success") return redirect(url_for('web.login')) @@ -1092,10 +1095,10 @@ def login(): return redirect_back(url_for("web.index")) except ldap.INVALID_CREDENTIALS: ipAdress = request.headers.get('X-Forwarded-For', request.remote_addr) - log.info('LDAP Login failed for user "%s" IP-adress: %s', form['username'], ipAdress) + logger.info('LDAP Login failed for user "%s" IP-adress: %s', form['username'], ipAdress) flash(_(u"Wrong Username or Password"), category="error") except ldap.SERVER_DOWN: - log.info('LDAP Login failed, LDAP Server down') + logger.info('LDAP Login failed, LDAP Server down') flash(_(u"Could not login. 
LDAP server down, please contact your administrator"), category="error") else: if user and check_password_hash(user.password, form['password']) and user.nickname is not "Guest": @@ -1104,7 +1107,7 @@ def login(): return redirect_back(url_for("web.index")) else: ipAdress = request.headers.get('X-Forwarded-For', request.remote_addr) - log.info('Login failed for user "%s" IP-adress: %s', form['username'], ipAdress) + logger.info('Login failed for user "%s" IP-adress: %s', form['username'], ipAdress) flash(_(u"Wrong Username or Password"), category="error") next_url = url_for('web.index') @@ -1345,7 +1348,7 @@ def show_book(book_id): matching_have_read_book = getattr(entries, 'custom_column_'+str(config.config_read_column)) have_read = len(matching_have_read_book) > 0 and matching_have_read_book[0].value except KeyError: - log.error("Custom Column No.%d is not exisiting in calibre database", config.config_read_column) + logger.error("Custom Column No.%d is not exisiting in calibre database", config.config_read_column) have_read = None else: diff --git a/cps/worker.py b/cps/worker.py index a33ad99d..760f1a0a 100644 --- a/cps/worker.py +++ b/cps/worker.py @@ -48,7 +48,7 @@ from . import logger, config, db, gdriveutils from .subproc_wrapper import process_open -log = logger.create() +# log = logger.create() chunksize = 8192 # task 'status' consts @@ -90,8 +90,8 @@ def get_attachment(bookpath, filename): data = file_.read() file_.close() except IOError as e: - log.exception(e) # traceback.print_exc() - log.error(u'The requested file could not be read. Maybe wrong permissions?') + logger.exception(e) # traceback.print_exc() + logger.error(u'The requested file could not be read. Maybe wrong permissions?') return None attachment = MIMEBase('application', 'octet-stream') @@ -116,7 +116,7 @@ class emailbase(): def send(self, strg): """Send `strg' to the server.""" - log.debug('send: %r', strg[:300]) + logger.debug('send: %r', strg[:300]) if hasattr(self, 'sock') and self.sock: try: if self.transferSize: @@ -142,7 +142,7 @@ class emailbase(): @classmethod def _print_debug(self, *args): - log.debug(args) + logger.debug(args) def getTransferStatus(self): if self.transferSize: @@ -257,14 +257,14 @@ class WorkerThread(threading.Thread): # if it does - mark the conversion task as complete and return a success # this will allow send to kindle workflow to continue to work if os.path.isfile(file_path + format_new_ext): - log.info("Book id %d already converted to %s", bookid, format_new_ext) + logger.info("Book id %d already converted to %s", bookid, format_new_ext) cur_book = db.session.query(db.Books).filter(db.Books.id == bookid).first() self.queue[self.current]['path'] = file_path self.queue[self.current]['title'] = cur_book.title self._handleSuccess() return file_path + format_new_ext else: - log.info("Book id %d - target format of %s does not exist. Moving forward with convert.", bookid, format_new_ext) + logger.info("Book id %d - target format of %s does not exist. Moving forward with convert.", bookid, format_new_ext) # check if converter-executable is existing if not os.path.exists(config.config_converterpath): @@ -320,13 +320,13 @@ class WorkerThread(threading.Thread): if conv_error: error_message = _(u"Kindlegen failed with Error %(error)s. 
Message: %(message)s", error=conv_error.group(1), message=conv_error.group(2).strip()) - log.debug("convert_kindlegen: %s", nextline) + logger.debug("convert_kindlegen: %s", nextline) else: while p.poll() is None: nextline = p.stdout.readline() if os.name == 'nt' and sys.version_info < (3, 0): nextline = nextline.decode('windows-1252') - log.debug(nextline.strip('\r\n')) + logger.debug(nextline.strip('\r\n')) # parse progress string from calibre-converter progress = re.search("(\d+)%\s.*", nextline) if progress: @@ -356,7 +356,7 @@ class WorkerThread(threading.Thread): return file_path + format_new_ext else: error_message = format_new_ext.upper() + ' format not found on disk' - log.info("ebook converter failed with error while converting book") + logger.info("ebook converter failed with error while converting book") if not error_message: error_message = 'Ebook converter failed with unknown error' self._handleError(error_message) @@ -460,7 +460,7 @@ class WorkerThread(threading.Thread): self.asyncSMTP = email(obj['settings']["mail_server"], obj['settings']["mail_port"], timeout) # link to logginglevel - if logger.is_debug_enabled(): + if logger.is_debug_enabled('general'): self.asyncSMTP.set_debuglevel(1) if use_ssl == 1: self.asyncSMTP.starttls() @@ -502,7 +502,7 @@ class WorkerThread(threading.Thread): return retVal def _handleError(self, error_message): - log.error(error_message) + logger.error(error_message) self.UIqueue[self.current]['stat'] = STAT_FAIL self.UIqueue[self.current]['progress'] = "100 %" self.UIqueue[self.current]['runtime'] = self._formatRuntime(