#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
# @Version : Python 3.11.4
# @Software : Sublime Text 4
# @Author : StudentCWZ
# @Email : StudentCWZ@outlook.com
# @Date : 2023/11/19 16:02
# @File : log.py
# @Description : Defines the Log services for the application.
"""
from flask import current_app
from loguru import logger

from application.dao import LogDao
from application.schemas import ParseLogRequestItem


class LogService:
    @classmethod
    def parse(cls, item: ParseLogRequestItem) -> int:
        """
        Parse logs from Elasticsearch and process them according to the configured SQL commands.

        Args:
            item: The item containing the start and end dates for which logs are to be parsed.

        Returns:
            The result of the log parsing as an integer.

        Raises:
            ServerError: If there is an issue with the parsing process.
        """
        # Retrieve the start and end dates from the given item.
        start_date = item.start
        end_date = item.end

        # Log the time interval for debugging purposes.
        logger.debug(f'The interval of time is between {start_date} and {end_date}...')

        # Retrieve the Elasticsearch index from the current application's configuration.
        index = current_app.config.Elasticsearch.Index

        # Retrieve the MySQL configuration from the current application's configuration.
        cfg = current_app.config.ExtraDB

        # Retrieve the SQL to be executed from the MySQL configuration.
        sql = cfg.Sql

        # Construct a configuration dictionary for the MySQL database connection.
        options = {
            'host': cfg.Host,
            'user': cfg.User,
            'password': cfg.Password,
            'db': cfg.DB,
            'port': cfg.Port,
        }

        # Parse the logs using the LogDao class and return the result.
        return LogDao.parse(start_date, end_date, index, sql, options)
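
# ---------------------------------------------------------------------------
# Illustrative usage: a minimal sketch of calling LogService.parse from a
# Flask view, kept in comments so it does not change the module's behavior.
# The blueprint name `log_bp`, the route path `/logs/parse`, and constructing
# `ParseLogRequestItem` directly from the request JSON are assumptions made
# for this example only; they are not defined by this service.
#
#   from flask import Blueprint, jsonify, request
#
#   log_bp = Blueprint('log', __name__)
#
#   @log_bp.route('/logs/parse', methods=['POST'])
#   def parse_logs():
#       # Build the request schema from the incoming JSON payload (assumed shape).
#       item = ParseLogRequestItem(**request.get_json())
#       # Delegate to the service; parse() returns an integer result.
#       result = LogService.parse(item)
#       return jsonify({'result': result})
# ---------------------------------------------------------------------------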