#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
# @Version     : Python 3.11.4
# @Software    : Sublime Text 4
# @Author      : StudentCWZ
# @Email       : StudentCWZ@outlook.com
# @Date        : 2023/11/20 09:57
# @File        : log.py
# @Description : Data access layer for reading logs from Elasticsearch and saving them to MySQL.
"""


from loguru import logger

from application.libs.helper import MySQLHelper
from application.models import Log
from application.utils import ElasticsearchUtil, ParseUtil


class LogDao:
    """
    Data Access Object for logs.
    This class includes methods to get data from Elasticsearch, process it and save it to MySQL.
    """

    @classmethod
    def get_data_from_es(cls, index: str | None = None, dsl: dict | None = None, sid: str | None = None) -> dict:
        """
        Get data from Elasticsearch by either scroll searching or direct searching.

        Args:
            index (str, optional): The name of the Elasticsearch index.
            dsl (dict, optional): The DSL query for Elasticsearch.
            sid (str, optional): The scroll id for an Elasticsearch scroll search.

        Returns:
            dict: The data returned from Elasticsearch.

        Raises:
            SystemError: If neither sid nor both index and dsl are provided.
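
        Example (illustrative only; the index name and DSL values are placeholders):
            data = LogDao.get_data_from_es(index='app-logs-2023.11', dsl={'size': 10000})
            page = LogDao.get_data_from_es(sid=data.get('_scroll_id'))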
        """
        if sid is not None:
            return ElasticsearchUtil.scroll_search(sid)
        elif index is not None and dsl is not None:
            return ElasticsearchUtil.search(index, dsl)
        else:
            raise SystemError('Could not get data from Elasticsearch: provide either sid or both index and dsl')

    @classmethod
    def get_mdata(cls, data: dict) -> list:
        """
        Get metadata from the data returned by Elasticsearch.

        Args:
            data (dict): The data returned from Elasticsearch.

        Returns:
            list: The metadata extracted from the data.

        Raises:
            SystemError: If the metadata is empty.
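
        Example of the expected response shape (values are illustrative):
            data = {'hits': {'hits': [{'_source': {'message': '...'}}]}}
            mdata = LogDao.get_mdata(data)  # -> [{'_source': {'message': '...'}}]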
        """
        mdata = data.get('hits', {}).get('hits')
        if not mdata:
            logger.error('No hits returned from Elasticsearch; mdata is an empty list ...')
            raise SystemError('No hits returned from Elasticsearch; mdata is an empty list ...')
        return mdata

    @classmethod
    def get_intent_from_mysql(cls, sql: str, cfg: dict) -> list:
        """
        Get the intent mapping from MySQL using the provided SQL.

        Args:
            sql (str): The SQL query to execute.
            cfg (dict): The configuration for MySQL.

        Returns:
            list: The intent mapping list.
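
        Example (illustrative only; the SQL statement and connection settings are placeholders):
            cfg = {'host': 'localhost', 'user': 'root', 'password': '***', 'database': 'logs'}
            mapping_list = LogDao.get_intent_from_mysql('SELECT intent FROM intent_mapping', cfg)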
        """
        with MySQLHelper(**cfg) as helper:
            result = helper.execute(sql)
        mapping_list = [item[0] for item in result]
        return mapping_list

    @classmethod
    def process_and_save_data(cls, lst: list, mapping_list: list):
        """
        Process the given list using the mapping list and save the result to the database.

        Args:
            lst (list): The list to process.
            mapping_list (list): The mapping list to use for processing.
        """
        if not lst:
            return
        result_generator = ParseUtil(mapping_list=mapping_list).filter(lst)
        Log.batch_save(result_generator)

    @classmethod
    def parse(cls, start: str, end: str, index: str, sql: str, cfg: dict) -> int:
        """
        Parse logs from Elasticsearch and save them to MySQL.

        Args:
            start (str): The start date for the logs.
            end (str): The end date for the logs.
            index (str): The Elasticsearch index to get logs from.
            sql (str): The SQL query to get the intent mapping from MySQL.
            cfg (dict): The configuration for MySQL.

        Returns:
            int: The total number of logs parsed.

        Raises:
            SystemError: If there is an error during the process.
        """

        # Get the DSL for the given start and end dates.
        dsl = ElasticsearchUtil.dsl(start, end)

        # Get data from Elasticsearch.
        data = cls.get_data_from_es(index=index, dsl=dsl)

        # Extract metadata from the data.
        mdata = cls.get_mdata(data)

        # Get the total number of logs.
        total = data.get('hits', {}).get('total', {}).get('value', 0)
        logger.debug(f'Total number of hits returned from Elasticsearch: {total}')

        # Log the start of the searching and saving process.
        logger.debug('Inserting data ...')

        # Get the intent mapping from MySQL.
        mapping_list = cls.get_intent_from_mysql(sql, cfg)

        # Process and save the metadata.
        cls.process_and_save_data(mdata, mapping_list)

        # Get the scroll id for scroll searching in Elasticsearch.
        scroll_id = data.get('_scroll_id')

        try:
            for _ in range(total // dsl.get('size', 10000) + 1):
                # Get more data from Elasticsearch using scroll searching.
                res = cls.get_data_from_es(sid=scroll_id)
                lst = res.get('hits', {}).get('hits')

                # An empty batch means the scroll is exhausted.
                if not lst:
                    break

                # Process and save the data.
                cls.process_and_save_data(lst, mapping_list)
        except Exception as e:
            # Log the error and raise a SystemError.
            logger.error(f'Failed to parse and save the data: {e}')
            raise SystemError('Failed to parse and save the data') from e
        else:
            # Log the success of the process.
            logger.debug('The process of inserting data succeeded!')
        finally:
            # Log the end of the process.
            logger.debug('Finished inserting the data!')

        return total
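

# A minimal usage sketch, assuming the application supplies MySQL connection settings and an
# intent-mapping SQL statement; every value below is a placeholder, not a real configuration.
if __name__ == '__main__':
    mysql_cfg = {
        'host': 'localhost',  # placeholder MySQL connection settings for MySQLHelper
        'user': 'root',
        'password': '***',
        'database': 'logs',
    }
    total_parsed = LogDao.parse(
        start='2023-11-20 00:00:00',  # placeholder date range understood by ElasticsearchUtil.dsl
        end='2023-11-21 00:00:00',
        index='app-logs-2023.11',  # placeholder Elasticsearch index name
        sql='SELECT intent FROM intent_mapping',  # placeholder intent-mapping query
        cfg=mysql_cfg,
    )
    logger.info(f'Parsed {total_parsed} logs in total.')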