From cb7faa575de173aede6491330134c2ffa0f25716 Mon Sep 17 00:00:00 2001 From: jlao Date: Fri, 5 Apr 2024 12:39:01 +0800 Subject: [PATCH] Enable i18n, add user / username field, pylint styling --- drf_api_logger/admin.py | 48 +++++----- drf_api_logger/apps.py | 4 +- drf_api_logger/insert_log_into_database.py | 30 +++---- .../middleware/api_logger_middleware.py | 90 ++++++++++--------- drf_api_logger/models.py | 32 ++++--- 5 files changed, 106 insertions(+), 98 deletions(-) diff --git a/drf_api_logger/admin.py b/drf_api_logger/admin.py index 7085acd..83ea652 100644 --- a/drf_api_logger/admin.py +++ b/drf_api_logger/admin.py @@ -8,7 +8,7 @@ from drf_api_logger.utils import database_log_enabled if database_log_enabled(): - from drf_api_logger.models import APILogsModel + from drf_api_logger.models import APILogs from django.utils.translation import gettext_lazy as _ import csv @@ -24,14 +24,14 @@ def export_as_csv(self, request, queryset): writer.writerow(field_names) for obj in queryset: - row = writer.writerow([getattr(obj, field) for field in field_names]) + writer.writerow([getattr(obj, field) for field in field_names]) return response - export_as_csv.short_description = "Export Selected" + export_as_csv.short_description = _("Export selected") class SlowAPIsFilter(admin.SimpleListFilter): - title = _('API Performance') + title = _('API performance') # Parameter for the filter that will be used in the URL query. parameter_name = 'api_performance' @@ -39,8 +39,8 @@ class SlowAPIsFilter(admin.SimpleListFilter): def __init__(self, request, params, model, model_admin): super().__init__(request, params, model, model_admin) if hasattr(settings, 'DRF_API_LOGGER_SLOW_API_ABOVE'): - if type(settings.DRF_API_LOGGER_SLOW_API_ABOVE) == int: # Making sure for integer value. - self._DRF_API_LOGGER_SLOW_API_ABOVE = settings.DRF_API_LOGGER_SLOW_API_ABOVE / 1000 # Converting to seconds. + if type(settings.DRF_API_LOGGER_SLOW_API_ABOVE) == int: # Ensuring integer value + self._slow_api_above = settings.DRF_API_LOGGER_SLOW_API_ABOVE / 1000 # Convert to seconds def lookups(self, request, model_admin): """ @@ -50,8 +50,8 @@ def lookups(self, request, model_admin): human-readable name for the option that will appear in the right sidebar. """ - slow = 'Slow' - fast = 'Fast' + slow = _('Slow') + fast = _('Fast') if hasattr(settings, 'DRF_API_LOGGER_SLOW_API_ABOVE'): slow += ', >={}ms'.format(settings.DRF_API_LOGGER_SLOW_API_ABOVE) fast += ', <{}ms'.format(settings.DRF_API_LOGGER_SLOW_API_ABOVE) @@ -69,9 +69,9 @@ def queryset(self, request, queryset): """ # to decide how to filter the queryset. if self.value() == 'slow': - return queryset.filter(execution_time__gte=self._DRF_API_LOGGER_SLOW_API_ABOVE) + return queryset.filter(execution_time__gte=self._slow_api_above) if self.value() == 'fast': - return queryset.filter(execution_time__lt=self._DRF_API_LOGGER_SLOW_API_ABOVE) + return queryset.filter(execution_time__lt=self._slow_api_above) return queryset @@ -81,33 +81,33 @@ class APILogsAdmin(admin.ModelAdmin, ExportCsvMixin): def __init__(self, model, admin_site): super().__init__(model, admin_site) - self._DRF_API_LOGGER_TIMEDELTA = 0 + self._timedelta = 0 if hasattr(settings, 'DRF_API_LOGGER_SLOW_API_ABOVE'): if type(settings.DRF_API_LOGGER_SLOW_API_ABOVE) == int: # Making sure for integer value. self.list_filter += (SlowAPIsFilter,) if hasattr(settings, 'DRF_API_LOGGER_TIMEDELTA'): if type(settings.DRF_API_LOGGER_TIMEDELTA) == int: # Making sure for integer value. 
- self._DRF_API_LOGGER_TIMEDELTA = settings.DRF_API_LOGGER_TIMEDELTA + self._timedelta = settings.DRF_API_LOGGER_TIMEDELTA - def added_on_time(self, obj): - return (obj.added_on + timedelta(minutes=self._DRF_API_LOGGER_TIMEDELTA)).strftime("%d %b %Y %H:%M:%S") + def time_display(self, obj): + return (obj.timestamp + timedelta(minutes=self._timedelta)).strftime("%d %b %Y %H:%M:%S") - added_on_time.admin_order_field = 'added_on' - added_on_time.short_description = 'Added on' + time_display.admin_order_field = 'timestamp' + time_display.short_description = _('Timestamp') list_per_page = 20 - list_display = ('id', 'api', 'method', 'status_code', 'execution_time', 'added_on_time',) - list_filter = ('added_on', 'status_code', 'method',) + list_display = ('api', 'user', 'method', 'status_code', 'execution_time', 'time_display',) + list_filter = ('timestamp', 'status_code', 'method',) search_fields = ('body', 'response', 'headers', 'api',) readonly_fields = ( - 'execution_time', 'client_ip_address', 'api', - 'headers', 'body', 'method', 'response', 'status_code', 'added_on_time', + 'user', 'execution_time', 'client_ip_address', 'api', + 'headers', 'body', 'method', 'response', 'status_code', 'time_display', ) - exclude = ('added_on',) + exclude = ('timestamp',) change_list_template = 'charts_change_list.html' change_form_template = 'change_form.html' - date_hierarchy = 'added_on' + date_hierarchy = 'timestamp' def changelist_view(self, request, extra_context=None): response = super(APILogsAdmin, self).changelist_view(request, extra_context) @@ -115,7 +115,7 @@ def changelist_view(self, request, extra_context=None): filtered_query_set = response.context_data["cl"].queryset except: return response - analytics_model = filtered_query_set.values('added_on__date').annotate(total=Count('id')).order_by('total') + analytics_model = filtered_query_set.values('timestamp__date').annotate(total=Count('id')).order_by('total') status_code_count_mode = filtered_query_set.values('id').values('status_code').annotate( total=Count('id')).order_by('status_code') status_code_count_keys = list() @@ -153,4 +153,4 @@ def has_change_permission(self, request, obj=None): return False - admin.site.register(APILogsModel, APILogsAdmin) + admin.site.register(APILogs, APILogsAdmin) diff --git a/drf_api_logger/apps.py b/drf_api_logger/apps.py index b9638da..f9e7118 100644 --- a/drf_api_logger/apps.py +++ b/drf_api_logger/apps.py @@ -1,6 +1,8 @@ from django.apps import AppConfig +from django.utils.translation import gettext_lazy as _ class LoggerConfig(AppConfig): name = 'drf_api_logger' - verbose_name = 'DRF API Logger' + verbose_name = _('DRF API Logger') + verbose_name_plural = _('DRF API Logger') diff --git a/drf_api_logger/insert_log_into_database.py b/drf_api_logger/insert_log_into_database.py index c96d542..2c06f33 100644 --- a/drf_api_logger/insert_log_into_database.py +++ b/drf_api_logger/insert_log_into_database.py @@ -1,10 +1,10 @@ from queue import Queue import time -from django.conf import settings from threading import Thread +from django.conf import settings from django.db.utils import OperationalError -from drf_api_logger.models import APILogsModel +from drf_api_logger.models import APILogs class InsertLogIntoDatabase(Thread): @@ -12,44 +12,44 @@ class InsertLogIntoDatabase(Thread): def __init__(self): super().__init__() - self.DRF_API_LOGGER_DEFAULT_DATABASE = 'default' + self.default_database = 'default' if hasattr(settings, 'DRF_API_LOGGER_DEFAULT_DATABASE'): - self.DRF_API_LOGGER_DEFAULT_DATABASE = 
settings.DRF_API_LOGGER_DEFAULT_DATABASE + self.default_database = settings.DRF_API_LOGGER_DEFAULT_DATABASE - self.DRF_LOGGER_QUEUE_MAX_SIZE = 50 # Default queue size 50 + self.queue_max_size = 50 # Default queue size 50 if hasattr(settings, 'DRF_LOGGER_QUEUE_MAX_SIZE'): - self.DRF_LOGGER_QUEUE_MAX_SIZE = settings.DRF_LOGGER_QUEUE_MAX_SIZE + self.queue_max_size = settings.DRF_LOGGER_QUEUE_MAX_SIZE - if self.DRF_LOGGER_QUEUE_MAX_SIZE < 1: + if self.queue_max_size < 1: raise Exception(""" DRF API LOGGER EXCEPTION Value of DRF_LOGGER_QUEUE_MAX_SIZE must be greater than 0 """) - self.DRF_LOGGER_INTERVAL = 10 # Default DB insertion interval is 10 seconds. + self.interval = 10 # Default DB insertion interval is 10 seconds. if hasattr(settings, 'DRF_LOGGER_INTERVAL'): - self.DRF_LOGGER_INTERVAL = settings.DRF_LOGGER_INTERVAL + self.interval = settings.DRF_LOGGER_INTERVAL - if self.DRF_LOGGER_INTERVAL < 1: + if self.interval < 1: raise Exception(""" DRF API LOGGER EXCEPTION Value of DRF_LOGGER_INTERVAL must be greater than 0 """) - self._queue = Queue(maxsize=self.DRF_LOGGER_QUEUE_MAX_SIZE) + self._queue = Queue(maxsize=self.queue_max_size) def run(self) -> None: self.start_queue_process() def put_log_data(self, data): - self._queue.put(APILogsModel(**data)) + self._queue.put(APILogs(**data)) - if self._queue.qsize() >= self.DRF_LOGGER_QUEUE_MAX_SIZE: + if self._queue.qsize() >= self.queue_max_size: self._start_bulk_insertion() def start_queue_process(self): while True: - time.sleep(self.DRF_LOGGER_INTERVAL) + time.sleep(self.interval) self._start_bulk_insertion() def _start_bulk_insertion(self): @@ -61,7 +61,7 @@ def _start_bulk_insertion(self): def _insert_into_data_base(self, bulk_item): try: - APILogsModel.objects.using(self.DRF_API_LOGGER_DEFAULT_DATABASE).bulk_create(bulk_item) + APILogs.objects.using(self.default_database).bulk_create(bulk_item) except OperationalError: raise Exception(""" DRF API LOGGER EXCEPTION diff --git a/drf_api_logger/middleware/api_logger_middleware.py b/drf_api_logger/middleware/api_logger_middleware.py index 8134608..caecede 100644 --- a/drf_api_logger/middleware/api_logger_middleware.py +++ b/drf_api_logger/middleware/api_logger_middleware.py @@ -24,49 +24,49 @@ def __init__(self, get_response): self.get_response = get_response # One-time configuration and initialization. 
- self.DRF_API_LOGGER_DATABASE = False + self.database = False if hasattr(settings, 'DRF_API_LOGGER_DATABASE'): - self.DRF_API_LOGGER_DATABASE = settings.DRF_API_LOGGER_DATABASE + self.database = settings.DRF_API_LOGGER_DATABASE - self.DRF_API_LOGGER_SIGNAL = False + self.signal = False if hasattr(settings, 'DRF_API_LOGGER_SIGNAL'): - self.DRF_API_LOGGER_SIGNAL = settings.DRF_API_LOGGER_SIGNAL + self.signal = settings.DRF_API_LOGGER_SIGNAL - self.DRF_API_LOGGER_PATH_TYPE = 'ABSOLUTE' + self.path_type = 'ABSOLUTE' if hasattr(settings, 'DRF_API_LOGGER_PATH_TYPE'): if settings.DRF_API_LOGGER_PATH_TYPE in ['ABSOLUTE', 'RAW_URI', 'FULL_PATH']: - self.DRF_API_LOGGER_PATH_TYPE = settings.DRF_API_LOGGER_PATH_TYPE + self.path_type = settings.DRF_API_LOGGER_PATH_TYPE - self.DRF_API_LOGGER_SKIP_URL_NAME = [] + self.skip_url_name = [] if hasattr(settings, 'DRF_API_LOGGER_SKIP_URL_NAME'): if type(settings.DRF_API_LOGGER_SKIP_URL_NAME) is tuple or type( settings.DRF_API_LOGGER_SKIP_URL_NAME) is list: - self.DRF_API_LOGGER_SKIP_URL_NAME = settings.DRF_API_LOGGER_SKIP_URL_NAME + self.skip_url_name = settings.DRF_API_LOGGER_SKIP_URL_NAME - self.DRF_API_LOGGER_SKIP_NAMESPACE = [] + self.skip_namespace = [] if hasattr(settings, 'DRF_API_LOGGER_SKIP_NAMESPACE'): if type(settings.DRF_API_LOGGER_SKIP_NAMESPACE) is tuple or type( settings.DRF_API_LOGGER_SKIP_NAMESPACE) is list: - self.DRF_API_LOGGER_SKIP_NAMESPACE = settings.DRF_API_LOGGER_SKIP_NAMESPACE + self.skip_namespace = settings.DRF_API_LOGGER_SKIP_NAMESPACE - self.DRF_API_LOGGER_METHODS = [] + self.methods = [] if hasattr(settings, 'DRF_API_LOGGER_METHODS'): if type(settings.DRF_API_LOGGER_METHODS) is tuple or type( settings.DRF_API_LOGGER_METHODS) is list: - self.DRF_API_LOGGER_METHODS = settings.DRF_API_LOGGER_METHODS + self.methods = settings.DRF_API_LOGGER_METHODS - self.DRF_API_LOGGER_STATUS_CODES = [] + self.status_codes = [] if hasattr(settings, 'DRF_API_LOGGER_STATUS_CODES'): if type(settings.DRF_API_LOGGER_STATUS_CODES) is tuple or type( settings.DRF_API_LOGGER_STATUS_CODES) is list: - self.DRF_API_LOGGER_STATUS_CODES = settings.DRF_API_LOGGER_STATUS_CODES + self.status_codes = settings.DRF_API_LOGGER_STATUS_CODES - self.DRF_API_LOGGER_ENABLE_TRACING = False - self.DRF_API_LOGGER_TRACING_ID_HEADER_NAME = None + self.enable_tracing = False + self.tracing_id_header_name = None if hasattr(settings, 'DRF_API_LOGGER_ENABLE_TRACING'): - self.DRF_API_LOGGER_ENABLE_TRACING = settings.DRF_API_LOGGER_ENABLE_TRACING - if self.DRF_API_LOGGER_ENABLE_TRACING and hasattr(settings, 'DRF_API_LOGGER_TRACING_ID_HEADER_NAME'): - self.DRF_API_LOGGER_TRACING_ID_HEADER_NAME = settings.DRF_API_LOGGER_TRACING_ID_HEADER_NAME + self.enable_tracing = settings.DRF_API_LOGGER_ENABLE_TRACING + if self.enable_tracing and hasattr(settings, 'DRF_API_LOGGER_TRACING_ID_HEADER_NAME'): + self.tracing_id_header_name = settings.DRF_API_LOGGER_TRACING_ID_HEADER_NAME self.tracing_func_name = None if hasattr(settings, 'DRF_API_LOGGER_TRACING_FUNC'): @@ -74,20 +74,20 @@ def __init__(self, get_response): mod = importlib.import_module(mod_name) self.tracing_func_name = getattr(mod, func_name) - self.DRF_API_LOGGER_MAX_REQUEST_BODY_SIZE = -1 + self.max_request_body_size = -1 if hasattr(settings, 'DRF_API_LOGGER_MAX_REQUEST_BODY_SIZE'): if type(settings.DRF_API_LOGGER_MAX_REQUEST_BODY_SIZE) is int: - self.DRF_API_LOGGER_MAX_REQUEST_BODY_SIZE = settings.DRF_API_LOGGER_MAX_REQUEST_BODY_SIZE + self.max_request_body_size = settings.DRF_API_LOGGER_MAX_REQUEST_BODY_SIZE - 
self.DRF_API_LOGGER_MAX_RESPONSE_BODY_SIZE = -1
+        self.max_response_body_size = -1
         if hasattr(settings, 'DRF_API_LOGGER_MAX_RESPONSE_BODY_SIZE'):
             if type(settings.DRF_API_LOGGER_MAX_RESPONSE_BODY_SIZE) is int:
-                self.DRF_API_LOGGER_MAX_RESPONSE_BODY_SIZE = settings.DRF_API_LOGGER_MAX_RESPONSE_BODY_SIZE
+                self.max_response_body_size = settings.DRF_API_LOGGER_MAX_RESPONSE_BODY_SIZE
 
     def __call__(self, request):
         # Run only if logger is enabled.
-        if self.DRF_API_LOGGER_DATABASE or self.DRF_API_LOGGER_SIGNAL:
+        if self.database or self.signal:
 
             url_name = resolve(request.path_info).url_name
             namespace = resolve(request.path_info).namespace
@@ -97,11 +97,11 @@ def __call__(self, request):
                 return self.get_response(request)
 
             # Skip for url name
-            if url_name in self.DRF_API_LOGGER_SKIP_URL_NAME:
+            if url_name in self.skip_url_name:
                 return self.get_response(request)
 
             # Skip entire app using namespace
-            if namespace in self.DRF_API_LOGGER_SKIP_NAMESPACE:
+            if namespace in self.skip_namespace:
                 return self.get_response(request)
 
             # Code to be executed for each request/response after
@@ -115,8 +115,8 @@ def __call__(self, request):
             request_data = ''
             try:
                 request_data = json.loads(request.body) if request.body else ''
-                if self.DRF_API_LOGGER_MAX_REQUEST_BODY_SIZE > -1:
-                    if sys.getsizeof(request_data) > self.DRF_API_LOGGER_MAX_REQUEST_BODY_SIZE:
+                if self.max_request_body_size > -1:
+                    if sys.getsizeof(request_data) > self.max_request_body_size:
                         """
                         Ignore the request body if larger then specified.
                         """
@@ -125,9 +125,9 @@
                 pass
 
             tracing_id = None
-            if self.DRF_API_LOGGER_ENABLE_TRACING:
-                if self.DRF_API_LOGGER_TRACING_ID_HEADER_NAME:
-                    tracing_id = headers.get(self.DRF_API_LOGGER_TRACING_ID_HEADER_NAME)
+            if self.enable_tracing:
+                if self.tracing_id_header_name:
+                    tracing_id = headers.get(self.tracing_id_header_name)
                 if not tracing_id:
                     """
                     If tracing is is not present in header, get it from function or uuid.
                     """
@@ -143,14 +143,14 @@ def __call__(self, request):
             response = self.get_response(request)
 
             # Only log required status codes if matching
-            if self.DRF_API_LOGGER_STATUS_CODES and response.status_code not in self.DRF_API_LOGGER_STATUS_CODES:
+            if self.status_codes and response.status_code not in self.status_codes:
                 return response
 
             # Log only registered methods if available.
-            if len(self.DRF_API_LOGGER_METHODS) > 0 and method not in self.DRF_API_LOGGER_METHODS:
+            if len(self.methods) > 0 and method not in self.methods:
                 return response
 
-            self.DRF_API_LOGGER_CONTENT_TYPES = [
+            self.content_types = [
                 "application/json",
                 "application/vnd.api+json",
                 "application/gzip",
@@ -161,9 +161,9 @@
             ) in (list, tuple):
                 for content_type in settings.DRF_API_LOGGER_CONTENT_TYPES:
                     if re.match(r"^application\/vnd\..+\+json$", content_type):
-                        self.DRF_API_LOGGER_CONTENT_TYPES.append(content_type)
+                        self.content_types.append(content_type)
 
-            if response.get("content-type") in self.DRF_API_LOGGER_CONTENT_TYPES:
+            if response.get("content-type") in self.content_types:
                 if response.get('content-type') == 'application/gzip':
                     response_body = '** GZIP Archive **'
                 elif response.get('content-type') == 'application/octet-stream':
@@ -175,14 +175,14 @@
                     response_body = json.loads(response.content.decode())
                 else:
                     response_body = json.loads(response.content)
-                if self.DRF_API_LOGGER_MAX_RESPONSE_BODY_SIZE > -1:
-                    if sys.getsizeof(response_body) > self.DRF_API_LOGGER_MAX_RESPONSE_BODY_SIZE:
+                if self.max_response_body_size > -1:
+                    if sys.getsizeof(response_body) > self.max_response_body_size:
                         response_body = ''
-                if self.DRF_API_LOGGER_PATH_TYPE == 'ABSOLUTE':
+                if self.path_type == 'ABSOLUTE':
                     api = request.build_absolute_uri()
-                elif self.DRF_API_LOGGER_PATH_TYPE == 'FULL_PATH':
+                elif self.path_type == 'FULL_PATH':
                     api = request.get_full_path()
-                elif self.DRF_API_LOGGER_PATH_TYPE == 'RAW_URI':
+                elif self.path_type == 'RAW_URI':
                     api = request.get_raw_uri()
                 else:
                     api = request.build_absolute_uri()
@@ -192,13 +192,15 @@
                     headers=mask_sensitive_data(headers),
                     body=mask_sensitive_data(request_data),
                     method=method,
+                    user=request.user if request.user.is_authenticated else None,
+                    username=request.user.username if request.user.is_authenticated else None,
                     client_ip_address=get_client_ip(request),
                     response=mask_sensitive_data(response_body),
                     status_code=response.status_code,
                     execution_time=time.time() - start_time,
-                    added_on=timezone.now()
+                    timestamp=timezone.now()
                 )
-                if self.DRF_API_LOGGER_DATABASE:
+                if self.database:
                     if LOGGER_THREAD:
                         d = data.copy()
                         d['headers'] = json.dumps(d['headers'], indent=4, ensure_ascii=False) if d.get('headers') else ''
@@ -206,7 +208,7 @@ def __call__(self, request):
                         d['body'] = json.dumps(d['body'], indent=4, ensure_ascii=False) if d.get('body') else ''
                         d['response'] = json.dumps(d['response'], indent=4, ensure_ascii=False) if d.get('response') else ''
                         LOGGER_THREAD.put_log_data(data=d)
-                if self.DRF_API_LOGGER_SIGNAL:
+                if self.signal:
                     if tracing_id:
                         data.update({
                             'tracing_id': tracing_id
diff --git a/drf_api_logger/models.py b/drf_api_logger/models.py
index 72f312a..9484087 100644
--- a/drf_api_logger/models.py
+++ b/drf_api_logger/models.py
@@ -1,5 +1,6 @@
 from django.db import models
-
+from django.contrib.auth.models import User
+from django.utils.translation import gettext_lazy as _
 from drf_api_logger.utils import database_log_enabled
 
 
@@ -10,26 +11,29 @@ class BaseModel(models.Model):
         id = models.BigAutoField(primary_key=True)
-        added_on = models.DateTimeField()
+        timestamp = models.DateTimeField(verbose_name=_('Timestamp'), auto_now_add=True)
 
         def __str__(self):
             return str(self.id)
 
         class Meta:
             abstract = True
-            ordering = ('-added_on',)
-
-
-    class APILogsModel(BaseModel):
-        api = models.CharField(max_length=1024, help_text='API URL')
-        headers = models.TextField()
-        body = models.TextField()
-        method = models.CharField(max_length=10, db_index=True)
-        client_ip_address = models.CharField(max_length=50)
-        response = models.TextField()
-        status_code = models.PositiveSmallIntegerField(help_text='Response status code', db_index=True)
+            ordering = ('-timestamp',)
+
+
+    class APILogs(BaseModel):
+        api = models.CharField(max_length=1024, verbose_name=_('API URL'), db_index=True)
+        user = models.ForeignKey(User, verbose_name=_('User'), db_index=True, on_delete=models.DO_NOTHING, null=True)
+        username = models.CharField(max_length=255, verbose_name=_('Username'), null=True, db_index=True)
+        headers = models.TextField(verbose_name=_('Request headers'))
+        body = models.TextField(verbose_name=_('Request body'))
+        method = models.CharField(max_length=10, db_index=True, verbose_name=_('Request method'))
+        client_ip_address = models.CharField(max_length=50, verbose_name=_('Client IP'))
+        response = models.TextField(verbose_name=_('Response'))
+        status_code = models.PositiveSmallIntegerField(db_index=True, verbose_name=_('Status code'))
         execution_time = models.DecimalField(decimal_places=5, max_digits=8,
-                                             help_text='Server execution time (Not complete response time.)')
+                                             help_text=_('Server execution time (not the complete response time)'),
+                                             verbose_name=_('Execution time'))
 
         def __str__(self):
             return self.api
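
For reference, the renamed model and the new user / username fields can be exercised as in the minimal sketch below. The setting values, the 200 ms threshold, and the username 'alice' are illustrative assumptions rather than anything prescribed by this patch; only the setting names, the model name, and the field names come from the code above. Because added_on is renamed to timestamp and two columns are added, a schema migration is also needed after upgrading.

    # settings.py -- setting names as read by the middleware and insert thread above;
    # the values are examples only.
    DRF_API_LOGGER_DATABASE = True         # queue log entries for the bulk-insert thread
    DRF_API_LOGGER_SLOW_API_ABOVE = 200    # milliseconds; enables the "API performance" admin filter
    DRF_LOGGER_QUEUE_MAX_SIZE = 50         # flush to the database once this many entries are queued
    DRF_LOGGER_INTERVAL = 10               # ...or every 10 seconds, whichever comes first

    # Example query against the renamed model (e.g. in a shell or a report view):
    from drf_api_logger.models import APILogs

    slow_calls = (
        APILogs.objects
        .filter(user__username='alice', execution_time__gte=0.2)  # 0.2 s matches the 200 ms threshold
        .order_by('-timestamp')
    )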