diff --git a/drf_api_logger/admin.py b/drf_api_logger/admin.py
index 159a00f..53c50b8 100644
--- a/drf_api_logger/admin.py
+++ b/drf_api_logger/admin.py
@@ -4,22 +4,21 @@
 from django.contrib import admin
 from django.db.models import Count
 from django.http import HttpResponse
-
 from drf_api_logger.utils import database_log_enabled
 
 if database_log_enabled():
-    from drf_api_logger.models import APILogsModel
-    from django.utils.translation import gettext_lazy as _
     import csv
+    from django.utils.translation import gettext_lazy as _
+    from drf_api_logger.models import APILogs
 
     class ExportCsvMixin:
         def export_as_csv(self, request, queryset):
            meta = self.model._meta
            field_names = [field.name for field in meta.fields]
 
-            response = HttpResponse(content_type='text/csv')
-            response['Content-Disposition'] = 'attachment; filename={}.csv'.format(meta)
+            response = HttpResponse(content_type="text/csv")
+            response["Content-Disposition"] = "attachment; filename={}.csv".format(meta)
            writer = csv.writer(response)
 
            writer.writerow(field_names)
@@ -28,19 +27,19 @@ def export_as_csv(self, request, queryset):
 
            return response
 
-        export_as_csv.short_description = "Export Selected"
+        export_as_csv.short_description = _("Export selected")
 
    class SlowAPIsFilter(admin.SimpleListFilter):
-        title = _('API Performance')
+        title = _("API performance")
 
        # Parameter for the filter that will be used in the URL query.
-        parameter_name = 'api_performance'
+        parameter_name = "api_performance"
 
        def __init__(self, request, params, model, model_admin):
            super().__init__(request, params, model, model_admin)
-            if hasattr(settings, 'DRF_API_LOGGER_SLOW_API_ABOVE'):
+            if hasattr(settings, "DRF_API_LOGGER_SLOW_API_ABOVE"):
                if isinstance(settings.DRF_API_LOGGER_SLOW_API_ABOVE, int):  # Making sure for integer value.
-                    self._DRF_API_LOGGER_SLOW_API_ABOVE = settings.DRF_API_LOGGER_SLOW_API_ABOVE / 1000  # Converting to seconds.
+                    self._slow_api_above = settings.DRF_API_LOGGER_SLOW_API_ABOVE / 1000  # Converting to seconds.
 
        def lookups(self, request, model_admin):
            """
@@ -50,15 +49,15 @@ def lookups(self, request, model_admin):
            human-readable name for the option that will appear
            in the right sidebar.
            """
-            slow = 'Slow'
-            fast = 'Fast'
-            if hasattr(settings, 'DRF_API_LOGGER_SLOW_API_ABOVE'):
-                slow += ', >={}ms'.format(settings.DRF_API_LOGGER_SLOW_API_ABOVE)
-                fast += ', <{}ms'.format(settings.DRF_API_LOGGER_SLOW_API_ABOVE)
+            slow = _("Slow")
+            fast = _("Fast")
+            if hasattr(settings, "DRF_API_LOGGER_SLOW_API_ABOVE"):
+                slow += ", >={}ms".format(settings.DRF_API_LOGGER_SLOW_API_ABOVE)
+                fast += ", <{}ms".format(settings.DRF_API_LOGGER_SLOW_API_ABOVE)
 
            return (
-                ('slow', _(slow)),
-                ('fast', _(fast)),
+                ("slow", _(slow)),
+                ("fast", _(fast)),
            )
 
        def queryset(self, request, queryset):
@@ -68,10 +67,10 @@ def queryset(self, request, queryset):
            `self.value()`.
            """
            # to decide how to filter the queryset.
-            if self.value() == 'slow':
-                return queryset.filter(execution_time__gte=self._DRF_API_LOGGER_SLOW_API_ABOVE)
-            if self.value() == 'fast':
-                return queryset.filter(execution_time__lt=self._DRF_API_LOGGER_SLOW_API_ABOVE)
+            if self.value() == "slow":
+                return queryset.filter(execution_time__gte=self._slow_api_above)
+            if self.value() == "fast":
+                return queryset.filter(execution_time__lt=self._slow_api_above)
 
            return queryset
 
@@ -81,33 +80,57 @@ class APILogsAdmin(admin.ModelAdmin, ExportCsvMixin):
        def __init__(self, model, admin_site):
            super().__init__(model, admin_site)
-            self._DRF_API_LOGGER_TIMEDELTA = 0
-            if hasattr(settings, 'DRF_API_LOGGER_SLOW_API_ABOVE'):
+            self._timedelta = 0
+            if hasattr(settings, "DRF_API_LOGGER_SLOW_API_ABOVE"):
                if isinstance(settings.DRF_API_LOGGER_SLOW_API_ABOVE, int):  # Making sure for integer value.
                    self.list_filter += (SlowAPIsFilter,)
-            if hasattr(settings, 'DRF_API_LOGGER_TIMEDELTA'):
+            if hasattr(settings, "DRF_API_LOGGER_TIMEDELTA"):
                if isinstance(settings.DRF_API_LOGGER_TIMEDELTA, int):  # Making sure for integer value.
-                    self._DRF_API_LOGGER_TIMEDELTA = settings.DRF_API_LOGGER_TIMEDELTA
+                    self._timedelta = settings.DRF_API_LOGGER_TIMEDELTA
 
-        def added_on_time(self, obj):
-            return (obj.added_on + timedelta(minutes=self._DRF_API_LOGGER_TIMEDELTA)).strftime("%d %b %Y %H:%M:%S")
+        def time_display(self, obj):
+            return (obj.timestamp + timedelta(minutes=self._timedelta)).strftime("%d %b %Y %H:%M:%S")
 
-        added_on_time.admin_order_field = 'added_on'
-        added_on_time.short_description = 'Added on'
+        time_display.admin_order_field = "timestamp"
+        time_display.short_description = _("Timestamp")
 
        list_per_page = 20
-        list_display = ('id', 'api', 'method', 'status_code', 'execution_time', 'added_on_time',)
-        list_filter = ('added_on', 'status_code', 'method',)
-        search_fields = ('body', 'response', 'headers', 'api',)
+        list_display = (
+            "api",
+            "user",
+            "method",
+            "status_code",
+            "execution_time",
+            "time_display",
+        )
+        list_filter = (
+            "timestamp",
+            "status_code",
+            "method",
+        )
+        search_fields = (
+            "body",
+            "response",
+            "headers",
+            "api",
+        )
        readonly_fields = (
-            'execution_time', 'client_ip_address', 'api',
-            'headers', 'body', 'method', 'response', 'status_code', 'added_on_time',
+            "user",
+            "execution_time",
+            "client_ip_address",
+            "api",
+            "headers",
+            "body",
+            "method",
+            "response",
+            "status_code",
+            "time_display",
        )
-        exclude = ('added_on',)
+        exclude = ("timestamp",)
 
-        change_list_template = 'charts_change_list.html'
-        change_form_template = 'change_form.html'
-        date_hierarchy = 'added_on'
+        change_list_template = "charts_change_list.html"
+        change_form_template = "change_form.html"
+        date_hierarchy = "timestamp"
 
        def changelist_view(self, request, extra_context=None):
            response = super(APILogsAdmin, self).changelist_view(request, extra_context)
@@ -115,32 +138,36 @@ def changelist_view(self, request, extra_context=None):
                filtered_query_set = response.context_data["cl"].queryset
            except Exception:
                return response
-            analytics_model = filtered_query_set.values('added_on__date').annotate(total=Count('id')).order_by('total')
-            status_code_count_mode = filtered_query_set.values('id').values('status_code').annotate(
-                total=Count('id')).order_by('status_code')
+            analytics_model = filtered_query_set.values("timestamp__date").annotate(total=Count("id")).order_by("total")
+            status_code_count_mode = (
+                filtered_query_set.values("id")
+                .values("status_code")
+                .annotate(total=Count("id"))
+                .order_by("status_code")
+            )
            status_code_count_keys = list()
            status_code_count_values = list()
            for item in status_code_count_mode:
-                status_code_count_keys.append(item.get('status_code'))
-                status_code_count_values.append(item.get('total'))
+                status_code_count_keys.append(item.get("status_code"))
+                status_code_count_values.append(item.get("total"))
            extra_context = dict(
                analytics=analytics_model,
                status_code_count_keys=status_code_count_keys,
-                status_code_count_values=status_code_count_values
+                status_code_count_values=status_code_count_values,
            )
            response.context_data.update(extra_context)
            return response
 
        def get_queryset(self, request):
-            drf_api_logger_default_database = 'default'
-            if hasattr(settings, 'DRF_API_LOGGER_DEFAULT_DATABASE'):
+            drf_api_logger_default_database = "default"
+            if hasattr(settings, "DRF_API_LOGGER_DEFAULT_DATABASE"):
                drf_api_logger_default_database = settings.DRF_API_LOGGER_DEFAULT_DATABASE
            return super(APILogsAdmin, self).get_queryset(request).using(drf_api_logger_default_database)
 
-        def changeform_view(self, request, object_id=None, form_url='', extra_context=None):
-            if request.GET.get('export', False):
-                drf_api_logger_default_database = 'default'
-                if hasattr(settings, 'DRF_API_LOGGER_DEFAULT_DATABASE'):
+        def changeform_view(self, request, object_id=None, form_url="", extra_context=None):
+            if request.GET.get("export", False):
+                drf_api_logger_default_database = "default"
+                if hasattr(settings, "DRF_API_LOGGER_DEFAULT_DATABASE"):
                    drf_api_logger_default_database = settings.DRF_API_LOGGER_DEFAULT_DATABASE
                export_queryset = self.get_queryset(request).filter(pk=object_id).using(drf_api_logger_default_database)
                return self.export_as_csv(request, export_queryset)
@@ -152,5 +179,4 @@ def has_add_permission(self, request, obj=None):
        def has_change_permission(self, request, obj=None):
            return False
-
-    admin.site.register(APILogsModel, APILogsAdmin)
+    admin.site.register(APILogs, APILogsAdmin)
diff --git a/drf_api_logger/apps.py b/drf_api_logger/apps.py
index b9638da..f9e7118 100644
--- a/drf_api_logger/apps.py
+++ b/drf_api_logger/apps.py
@@ -1,6 +1,8 @@
 from django.apps import AppConfig
+from django.utils.translation import gettext_lazy as _
 
 
 class LoggerConfig(AppConfig):
     name = 'drf_api_logger'
-    verbose_name = 'DRF API Logger'
+    verbose_name = _('DRF API Logger')
+    verbose_name_plural = _('DRF API Logger')
diff --git a/drf_api_logger/insert_log_into_database.py b/drf_api_logger/insert_log_into_database.py
index c96d542..2c06f33 100644
--- a/drf_api_logger/insert_log_into_database.py
+++ b/drf_api_logger/insert_log_into_database.py
@@ -1,10 +1,10 @@
 from queue import Queue
 import time
 
-from django.conf import settings
 from threading import Thread
+from django.conf import settings
 from django.db.utils import OperationalError
 
-from drf_api_logger.models import APILogsModel
+from drf_api_logger.models import APILogs
 
 
@@ -12,44 +12,44 @@ class InsertLogIntoDatabase(Thread):
 
    def __init__(self):
        super().__init__()
-        self.DRF_API_LOGGER_DEFAULT_DATABASE = 'default'
+        self.default_database = 'default'
        if hasattr(settings, 'DRF_API_LOGGER_DEFAULT_DATABASE'):
-            self.DRF_API_LOGGER_DEFAULT_DATABASE = settings.DRF_API_LOGGER_DEFAULT_DATABASE
+            self.default_database = settings.DRF_API_LOGGER_DEFAULT_DATABASE
 
-        self.DRF_LOGGER_QUEUE_MAX_SIZE = 50  # Default queue size 50
+        self.queue_max_size = 50  # Default queue size 50
        if hasattr(settings, 'DRF_LOGGER_QUEUE_MAX_SIZE'):
-            self.DRF_LOGGER_QUEUE_MAX_SIZE = settings.DRF_LOGGER_QUEUE_MAX_SIZE
+            self.queue_max_size = settings.DRF_LOGGER_QUEUE_MAX_SIZE
 
-        if self.DRF_LOGGER_QUEUE_MAX_SIZE < 1:
+        if self.queue_max_size < 1:
            raise Exception("""
            DRF API LOGGER EXCEPTION
            Value of DRF_LOGGER_QUEUE_MAX_SIZE must be greater than 0
            """)
 
-        self.DRF_LOGGER_INTERVAL = 10  # Default DB insertion interval is 10 seconds.
+        self.interval = 10  # Default DB insertion interval is 10 seconds.
        if hasattr(settings, 'DRF_LOGGER_INTERVAL'):
-            self.DRF_LOGGER_INTERVAL = settings.DRF_LOGGER_INTERVAL
+            self.interval = settings.DRF_LOGGER_INTERVAL
 
-        if self.DRF_LOGGER_INTERVAL < 1:
+        if self.interval < 1:
            raise Exception("""
            DRF API LOGGER EXCEPTION
            Value of DRF_LOGGER_INTERVAL must be greater than 0
            """)
 
-        self._queue = Queue(maxsize=self.DRF_LOGGER_QUEUE_MAX_SIZE)
+        self._queue = Queue(maxsize=self.queue_max_size)
 
    def run(self) -> None:
        self.start_queue_process()
 
    def put_log_data(self, data):
-        self._queue.put(APILogsModel(**data))
+        self._queue.put(APILogs(**data))
 
-        if self._queue.qsize() >= self.DRF_LOGGER_QUEUE_MAX_SIZE:
+        if self._queue.qsize() >= self.queue_max_size:
            self._start_bulk_insertion()
 
    def start_queue_process(self):
        while True:
-            time.sleep(self.DRF_LOGGER_INTERVAL)
+            time.sleep(self.interval)
            self._start_bulk_insertion()
 
    def _start_bulk_insertion(self):
@@ -61,7 +61,7 @@ def _start_bulk_insertion(self):
 
    def _insert_into_data_base(self, bulk_item):
        try:
-            APILogsModel.objects.using(self.DRF_API_LOGGER_DEFAULT_DATABASE).bulk_create(bulk_item)
+            APILogs.objects.using(self.default_database).bulk_create(bulk_item)
        except OperationalError:
            raise Exception("""
            DRF API LOGGER EXCEPTION
diff --git a/drf_api_logger/middleware/api_logger_middleware.py b/drf_api_logger/middleware/api_logger_middleware.py
index b461c6c..c043963 100644
--- a/drf_api_logger/middleware/api_logger_middleware.py
+++ b/drf_api_logger/middleware/api_logger_middleware.py
@@ -1,17 +1,16 @@
 import importlib
 import json
+import re
 import sys
 import time
 import uuid
-import re
 
 from django.conf import settings
 from django.urls import resolve
 from django.utils import timezone
-
 from drf_api_logger import API_LOGGER_SIGNAL
 from drf_api_logger.start_logger_when_server_starts import LOGGER_THREAD
-from drf_api_logger.utils import get_headers, get_client_ip, mask_sensitive_data
+from drf_api_logger.utils import get_client_ip, get_headers, mask_sensitive_data
 
 
 class APILoggerMiddleware:
@@ -19,70 +18,75 @@ def __init__(self, get_response):
        self.get_response = get_response
        # One-time configuration and initialization.
-        self.DRF_API_LOGGER_DATABASE = False
-        if hasattr(settings, 'DRF_API_LOGGER_DATABASE'):
-            self.DRF_API_LOGGER_DATABASE = settings.DRF_API_LOGGER_DATABASE
-
-        self.DRF_API_LOGGER_SIGNAL = False
-        if hasattr(settings, 'DRF_API_LOGGER_SIGNAL'):
-            self.DRF_API_LOGGER_SIGNAL = settings.DRF_API_LOGGER_SIGNAL
-
-        self.DRF_API_LOGGER_PATH_TYPE = 'ABSOLUTE'
-        if hasattr(settings, 'DRF_API_LOGGER_PATH_TYPE'):
-            if settings.DRF_API_LOGGER_PATH_TYPE in ['ABSOLUTE', 'RAW_URI', 'FULL_PATH']:
-                self.DRF_API_LOGGER_PATH_TYPE = settings.DRF_API_LOGGER_PATH_TYPE
-
-        self.DRF_API_LOGGER_SKIP_URL_NAME = []
-        if hasattr(settings, 'DRF_API_LOGGER_SKIP_URL_NAME'):
-            if type(settings.DRF_API_LOGGER_SKIP_URL_NAME) is tuple or type(
-                    settings.DRF_API_LOGGER_SKIP_URL_NAME) is list:
-                self.DRF_API_LOGGER_SKIP_URL_NAME = settings.DRF_API_LOGGER_SKIP_URL_NAME
-
-        self.DRF_API_LOGGER_SKIP_NAMESPACE = []
-        if hasattr(settings, 'DRF_API_LOGGER_SKIP_NAMESPACE'):
-            if type(settings.DRF_API_LOGGER_SKIP_NAMESPACE) is tuple or type(
-                    settings.DRF_API_LOGGER_SKIP_NAMESPACE) is list:
-                self.DRF_API_LOGGER_SKIP_NAMESPACE = settings.DRF_API_LOGGER_SKIP_NAMESPACE
-
-        self.DRF_API_LOGGER_METHODS = []
-        if hasattr(settings, 'DRF_API_LOGGER_METHODS'):
-            if type(settings.DRF_API_LOGGER_METHODS) is tuple or type(
-                    settings.DRF_API_LOGGER_METHODS) is list:
-                self.DRF_API_LOGGER_METHODS = settings.DRF_API_LOGGER_METHODS
-
-        self.DRF_API_LOGGER_STATUS_CODES = []
-        if hasattr(settings, 'DRF_API_LOGGER_STATUS_CODES'):
-            if type(settings.DRF_API_LOGGER_STATUS_CODES) is tuple or type(
-                    settings.DRF_API_LOGGER_STATUS_CODES) is list:
-                self.DRF_API_LOGGER_STATUS_CODES = settings.DRF_API_LOGGER_STATUS_CODES
-
-        self.DRF_API_LOGGER_ENABLE_TRACING = False
-        self.DRF_API_LOGGER_TRACING_ID_HEADER_NAME = None
-        if hasattr(settings, 'DRF_API_LOGGER_ENABLE_TRACING'):
-            self.DRF_API_LOGGER_ENABLE_TRACING = settings.DRF_API_LOGGER_ENABLE_TRACING
-            if self.DRF_API_LOGGER_ENABLE_TRACING and hasattr(settings, 'DRF_API_LOGGER_TRACING_ID_HEADER_NAME'):
-                self.DRF_API_LOGGER_TRACING_ID_HEADER_NAME = settings.DRF_API_LOGGER_TRACING_ID_HEADER_NAME
+        self.database = False
+        if hasattr(settings, "DRF_API_LOGGER_DATABASE"):
+            self.database = settings.DRF_API_LOGGER_DATABASE
+
+        self.signal = False
+        if hasattr(settings, "DRF_API_LOGGER_SIGNAL"):
+            self.signal = settings.DRF_API_LOGGER_SIGNAL
+
+        self.path_type = "ABSOLUTE"
+        if hasattr(settings, "DRF_API_LOGGER_PATH_TYPE"):
+            if settings.DRF_API_LOGGER_PATH_TYPE in ["ABSOLUTE", "RAW_URI", "FULL_PATH"]:
+                self.path_type = settings.DRF_API_LOGGER_PATH_TYPE
+
+        self.skip_url_name = []
+        if hasattr(settings, "DRF_API_LOGGER_SKIP_URL_NAME"):
+            if (
+                type(settings.DRF_API_LOGGER_SKIP_URL_NAME) is tuple
+                or type(settings.DRF_API_LOGGER_SKIP_URL_NAME) is list
+            ):
+                self.skip_url_name = settings.DRF_API_LOGGER_SKIP_URL_NAME
+
+        self.skip_namespace = []
+        if hasattr(settings, "DRF_API_LOGGER_SKIP_NAMESPACE"):
+            if (
+                type(settings.DRF_API_LOGGER_SKIP_NAMESPACE) is tuple
+                or type(settings.DRF_API_LOGGER_SKIP_NAMESPACE) is list
+            ):
+                self.skip_namespace = settings.DRF_API_LOGGER_SKIP_NAMESPACE
+
+        self.methods = []
+        if hasattr(settings, "DRF_API_LOGGER_METHODS"):
+            if type(settings.DRF_API_LOGGER_METHODS) is tuple or type(settings.DRF_API_LOGGER_METHODS) is list:
+                self.methods = settings.DRF_API_LOGGER_METHODS
+
+        self.status_codes = []
+        if hasattr(settings, "DRF_API_LOGGER_STATUS_CODES"):
+            if (
+                type(settings.DRF_API_LOGGER_STATUS_CODES) is tuple
+                or type(settings.DRF_API_LOGGER_STATUS_CODES) is list
+            ):
+                self.status_codes = settings.DRF_API_LOGGER_STATUS_CODES
+
+        self.enable_tracing = False
+        self.tracing_id_header_name = None
+        if hasattr(settings, "DRF_API_LOGGER_ENABLE_TRACING"):
+            self.enable_tracing = settings.DRF_API_LOGGER_ENABLE_TRACING
+            if self.enable_tracing and hasattr(settings, "DRF_API_LOGGER_TRACING_ID_HEADER_NAME"):
+                self.tracing_id_header_name = settings.DRF_API_LOGGER_TRACING_ID_HEADER_NAME
 
        self.tracing_func_name = None
-        if hasattr(settings, 'DRF_API_LOGGER_TRACING_FUNC'):
-            mod_name, func_name = settings.DRF_API_LOGGER_TRACING_FUNC.rsplit('.', 1)
+        if hasattr(settings, "DRF_API_LOGGER_TRACING_FUNC"):
+            mod_name, func_name = settings.DRF_API_LOGGER_TRACING_FUNC.rsplit(".", 1)
            mod = importlib.import_module(mod_name)
            self.tracing_func_name = getattr(mod, func_name)
 
-        self.DRF_API_LOGGER_MAX_REQUEST_BODY_SIZE = -1
-        if hasattr(settings, 'DRF_API_LOGGER_MAX_REQUEST_BODY_SIZE'):
+        self.max_request_body_size = -1
+        if hasattr(settings, "DRF_API_LOGGER_MAX_REQUEST_BODY_SIZE"):
            if type(settings.DRF_API_LOGGER_MAX_REQUEST_BODY_SIZE) is int:
-                self.DRF_API_LOGGER_MAX_REQUEST_BODY_SIZE = settings.DRF_API_LOGGER_MAX_REQUEST_BODY_SIZE
+                self.max_request_body_size = settings.DRF_API_LOGGER_MAX_REQUEST_BODY_SIZE
 
-        self.DRF_API_LOGGER_MAX_RESPONSE_BODY_SIZE = -1
-        if hasattr(settings, 'DRF_API_LOGGER_MAX_RESPONSE_BODY_SIZE'):
+        self.max_response_body_size = -1
+        if hasattr(settings, "DRF_API_LOGGER_MAX_RESPONSE_BODY_SIZE"):
            if type(settings.DRF_API_LOGGER_MAX_RESPONSE_BODY_SIZE) is int:
-                self.DRF_API_LOGGER_MAX_RESPONSE_BODY_SIZE = settings.DRF_API_LOGGER_MAX_RESPONSE_BODY_SIZE
+                self.max_response_body_size = settings.DRF_API_LOGGER_MAX_RESPONSE_BODY_SIZE
 
    def is_static_or_media_request(self, path):
-        static_url = getattr(settings, 'STATIC_URL', '/static/')
-        media_url = getattr(settings, 'MEDIA_URL', '/media/')
-
+        static_url = getattr(settings, "STATIC_URL", "/static/")
+        media_url = getattr(settings, "MEDIA_URL", "/media/")
+
        return path.startswith(static_url) or path.startswith(media_url)
 
    def __call__(self, request):
@@ -91,21 +95,21 @@ def __call__(self, request):
            return self.get_response(request)
 
        # Run only if logger is enabled.
-        if self.DRF_API_LOGGER_DATABASE or self.DRF_API_LOGGER_SIGNAL:
+        if self.database or self.signal:
 
            url_name = resolve(request.path_info).url_name
            namespace = resolve(request.path_info).namespace
 
            # Always skip Admin panel
-            if namespace == 'admin':
+            if namespace == "admin":
                return self.get_response(request)
 
            # Skip for url name
-            if url_name in self.DRF_API_LOGGER_SKIP_URL_NAME:
+            if url_name in self.skip_url_name:
                return self.get_response(request)
 
            # Skip entire app using namespace
-            if namespace in self.DRF_API_LOGGER_SKIP_NAMESPACE:
+            if namespace in self.skip_namespace:
                return self.get_response(request)
 
            # Code to be executed for each request/response after
@@ -116,22 +120,22 @@ def __call__(self, request):
            headers = get_headers(request=request)
            method = request.method
 
-            request_data = ''
+            request_data = ""
            try:
-                request_data = json.loads(request.body) if request.body else ''
-                if self.DRF_API_LOGGER_MAX_REQUEST_BODY_SIZE > -1:
-                    if sys.getsizeof(request_data) > self.DRF_API_LOGGER_MAX_REQUEST_BODY_SIZE:
+                request_data = json.loads(request.body) if request.body else ""
+                if self.max_request_body_size > -1:
+                    if sys.getsizeof(request_data) > self.max_request_body_size:
                        """
                        Ignore the request body if larger then specified.
""" - request_data = '' + request_data = "" except Exception: pass tracing_id = None - if self.DRF_API_LOGGER_ENABLE_TRACING: - if self.DRF_API_LOGGER_TRACING_ID_HEADER_NAME: - tracing_id = headers.get(self.DRF_API_LOGGER_TRACING_ID_HEADER_NAME) + if self.enable_tracing: + if self.tracing_id_header_name: + tracing_id = headers.get(self.tracing_id_header_name) if not tracing_id: """ If tracing is is not present in header, get it from function or uuid. @@ -147,50 +151,51 @@ def __call__(self, request): response = self.get_response(request) # Only log required status codes if matching - if self.DRF_API_LOGGER_STATUS_CODES and response.status_code not in self.DRF_API_LOGGER_STATUS_CODES: + if self.status_codes and response.status_code not in self.status_codes: return response # Log only registered methods if available. - if len(self.DRF_API_LOGGER_METHODS) > 0 and method not in self.DRF_API_LOGGER_METHODS: + if len(self.methods) > 0 and method not in self.methods: return response - self.DRF_API_LOGGER_CONTENT_TYPES = [ + self.content_types = [ "application/json", "application/vnd.api+json", "application/gzip", "application/octet-stream", "text/calendar", ] - if hasattr(settings, "DRF_API_LOGGER_CONTENT_TYPES") and type( - settings.DRF_API_LOGGER_CONTENT_TYPES - ) in (list, tuple): + if hasattr(settings, "DRF_API_LOGGER_CONTENT_TYPES") and type(settings.DRF_API_LOGGER_CONTENT_TYPES) in ( + list, + tuple, + ): for content_type in settings.DRF_API_LOGGER_CONTENT_TYPES: if re.match(r"^application\/vnd\..+\+json$", content_type): - self.DRF_API_LOGGER_CONTENT_TYPES.append(content_type) - - if response.get("content-type") in self.DRF_API_LOGGER_CONTENT_TYPES: - if response.get('content-type') == 'application/gzip': - response_body = '** GZIP Archive **' - elif response.get('content-type') == 'application/octet-stream': - response_body = '** Binary File **' - elif getattr(response, 'streaming', False): - response_body = '** Streaming **' - elif response.get('content-type') == 'text/calendar': - response_body = '** Calendar **' + self.content_types.append(content_type) + + if response.get("content-type") in self.content_types: + if response.get("content-type") == "application/gzip": + response_body = "** GZIP Archive **" + elif response.get("content-type") == "application/octet-stream": + response_body = "** Binary File **" + elif getattr(response, "streaming", False): + response_body = "** Streaming **" + elif response.get("content-type") == "text/calendar": + response_body = "** Calendar **" else: if type(response.content) is bytes: response_body = json.loads(response.content.decode()) else: response_body = json.loads(response.content) - if self.DRF_API_LOGGER_MAX_RESPONSE_BODY_SIZE > -1: - if sys.getsizeof(response_body) > self.DRF_API_LOGGER_MAX_RESPONSE_BODY_SIZE: - response_body = '' - if self.DRF_API_LOGGER_PATH_TYPE == 'ABSOLUTE': + if self.max_reponse_body_size > -1: + if sys.getsizeof(response_body) > self.max_reponse_body_size: + response_body = "" + if self.path_type == "ABSOLUTE": api = request.build_absolute_uri() - elif self.DRF_API_LOGGER_PATH_TYPE == 'FULL_PATH': + elif self.path_type == "FULL_PATH": api = request.get_full_path() - elif self.DRF_API_LOGGER_PATH_TYPE == 'RAW_URI': + elif self.path_type == "RAW_URI": api = request.get_raw_uri() else: api = request.build_absolute_uri() @@ -200,24 +205,24 @@ def __call__(self, request): headers=mask_sensitive_data(headers), body=mask_sensitive_data(request_data), method=method, + user=request.user if request.user.is_authenticated else 
None, + username=request.user.username if request.user.is_authenticated else None, client_ip_address=get_client_ip(request), response=mask_sensitive_data(response_body), status_code=response.status_code, execution_time=time.time() - start_time, - added_on=timezone.now() + timestamp=timezone.now(), ) - if self.DRF_API_LOGGER_DATABASE and LOGGER_THREAD: + if self.database and LOGGER_THREAD: d = data.copy() - d['headers'] = json.dumps(d['headers'], indent=4, ensure_ascii=False) if d.get('headers') else '' + d["headers"] = json.dumps(d["headers"], indent=4, ensure_ascii=False) if d.get("headers") else "" if request_data: - d['body'] = json.dumps(d['body'], indent=4, ensure_ascii=False) if d.get('body') else '' - d['response'] = json.dumps(d['response'], indent=4, ensure_ascii=False) if d.get('response') else '' + d["body"] = json.dumps(d["body"], indent=4, ensure_ascii=False) if d.get("body") else "" + d["response"] = json.dumps(d["response"], indent=4, ensure_ascii=False) if d.get("response") else "" LOGGER_THREAD.put_log_data(data=d) - if self.DRF_API_LOGGER_SIGNAL: + if self.signal: if tracing_id: - data.update({ - 'tracing_id': tracing_id - }) + data.update({"tracing_id": tracing_id}) API_LOGGER_SIGNAL.listen(**data) else: return response diff --git a/drf_api_logger/models.py b/drf_api_logger/models.py index 72f312a..9484087 100644 --- a/drf_api_logger/models.py +++ b/drf_api_logger/models.py @@ -1,5 +1,6 @@ from django.db import models - +from django.contrib.auth.models import User +from django.utils.translation import gettext_lazy as _ from drf_api_logger.utils import database_log_enabled @@ -10,26 +11,29 @@ class BaseModel(models.Model): id = models.BigAutoField(primary_key=True) - added_on = models.DateTimeField() + timestamp = models.DateTimeField(verbose_name=_('Timestamp'), auto_now_add=True) def __str__(self): return str(self.id) class Meta: abstract = True - ordering = ('-added_on',) - - - class APILogsModel(BaseModel): - api = models.CharField(max_length=1024, help_text='API URL') - headers = models.TextField() - body = models.TextField() - method = models.CharField(max_length=10, db_index=True) - client_ip_address = models.CharField(max_length=50) - response = models.TextField() - status_code = models.PositiveSmallIntegerField(help_text='Response status code', db_index=True) + ordering = ('-timestamp',) + + + class APILogs(BaseModel): + api = models.CharField(max_length=1024, verbose_name=_('API URL'), db_index=True) + user = models.ForeignKey(User, verbose_name=_('User'), db_index=True, on_delete=models.DO_NOTHING, null=True) + username = models.CharField(max_length=255, verbose_name=_('User name'), null=True, db_index=True) + headers = models.TextField(verbose_name=_('Request headers')) + body = models.TextField(verbose_name=_('Request body')) + method = models.CharField(max_length=10, db_index=True, verbose_name=_('Request method')) + client_ip_address = models.CharField(max_length=50, verbose_name=_('Client IP')) + response = models.TextField(verbose_name=_('Response')) + status_code = models.PositiveSmallIntegerField(db_index=True, verbose_name=_('Status code')) execution_time = models.DecimalField(decimal_places=5, max_digits=8, - help_text='Server execution time (Not complete response time.)') + help_text=_('Server execution time (Not complete response time.)'), + verbose_name=_('Execution time')) def __str__(self): return self.api
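For context, a minimal sketch of consuming the new user / username keys that the middleware now passes to the signal. It assumes the API_LOGGER_SIGNAL.listen += callback hook already documented by drf-api-logger and DRF_API_LOGGER_SIGNAL = True in settings; the listener name is illustrative, not part of this patch.

# Illustrative only -- not part of the patch above.
from drf_api_logger import API_LOGGER_SIGNAL

def api_log_listener(**log):
    # The middleware passes the same dict it builds in __call__, so the new
    # user/username keys arrive alongside api, method, status_code, etc.
    print(log.get("username"), log.get("method"), log.get("api"), log.get("status_code"))

API_LOGGER_SIGNAL.listen += api_log_listener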