Enable i18n, add user / username field, pylint styling #96

Open · wants to merge 2 commits into main
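This PR wraps the admin-facing strings in gettext_lazy so they can be translated, renames APILogsModel to APILogs and the added_on field to timestamp, surfaces the new user field in the admin changelist, and renames the settings-mirroring attributes to snake_case for pylint. As a hedged illustration of the new field (the user field definition itself is not part of this diff; the query below assumes it relates to the Django user model), per-user filtering would look like:

from drf_api_logger.models import APILogs

# Hypothetical query; assumes APILogs.user is a FK to settings.AUTH_USER_MODEL.
alice_logs = APILogs.objects.filter(user__username="alice")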
130 changes: 78 additions & 52 deletions drf_api_logger/admin.py
@@ -4,22 +4,21 @@
from django.contrib import admin
from django.db.models import Count
from django.http import HttpResponse

from drf_api_logger.utils import database_log_enabled

if database_log_enabled():
from drf_api_logger.models import APILogsModel
from django.utils.translation import gettext_lazy as _
import csv

from django.utils.text import format_lazy
from django.utils.translation import gettext_lazy as _
from drf_api_logger.models import APILogs

class ExportCsvMixin:
def export_as_csv(self, request, queryset):
meta = self.model._meta
field_names = [field.name for field in meta.fields]

response = HttpResponse(content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename={}.csv'.format(meta)
response = HttpResponse(content_type="text/csv")
response["Content-Disposition"] = "attachment; filename={}.csv".format(meta)
writer = csv.writer(response)

writer.writerow(field_names)
@@ -28,19 +27,19 @@ def export_as_csv(self, request, queryset):

return response

export_as_csv.short_description = "Export Selected"
export_as_csv.short_description = _("Export selected")

class SlowAPIsFilter(admin.SimpleListFilter):
title = _('API Performance')
title = _("API performance")

# Parameter for the filter that will be used in the URL query.
parameter_name = 'api_performance'
parameter_name = "api_performance"

def __init__(self, request, params, model, model_admin):
super().__init__(request, params, model, model_admin)
if hasattr(settings, 'DRF_API_LOGGER_SLOW_API_ABOVE'):
if hasattr(settings, "DRF_API_LOGGER_SLOW_API_ABOVE"):
if isinstance(settings.DRF_API_LOGGER_SLOW_API_ABOVE, int):  # Make sure the value is an integer.
self._DRF_API_LOGGER_SLOW_API_ABOVE = settings.DRF_API_LOGGER_SLOW_API_ABOVE / 1000 # Converting to seconds.
self._slow_api_above = settings.DRF_API_LOGGER_SLOW_API_ABOVE / 1000 # Converting to seconds.

def lookups(self, request, model_admin):
"""
@@ -50,15 +49,15 @@ def lookups(self, request, model_admin):
human-readable name for the option that will appear
in the right sidebar.
"""
slow = 'Slow'
fast = 'Fast'
if hasattr(settings, 'DRF_API_LOGGER_SLOW_API_ABOVE'):
slow += ', >={}ms'.format(settings.DRF_API_LOGGER_SLOW_API_ABOVE)
fast += ', <{}ms'.format(settings.DRF_API_LOGGER_SLOW_API_ABOVE)
slow = _("Slow")
fast = _("Fast")
if hasattr(settings, "DRF_API_LOGGER_SLOW_API_ABOVE"):
slow += ", >={}ms".format(settings.DRF_API_LOGGER_SLOW_API_ABOVE)
fast += ", <{}ms".format(settings.DRF_API_LOGGER_SLOW_API_ABOVE)

return (
('slow', _(slow)),
('fast', _(fast)),
("slow", _(slow)),
("fast", _(fast)),
)

def queryset(self, request, queryset):
@@ -68,10 +67,10 @@ def queryset(self, request, queryset):
`self.value()`.
"""
# to decide how to filter the queryset.
if self.value() == 'slow':
return queryset.filter(execution_time__gte=self._DRF_API_LOGGER_SLOW_API_ABOVE)
if self.value() == 'fast':
return queryset.filter(execution_time__lt=self._DRF_API_LOGGER_SLOW_API_ABOVE)
if self.value() == "slow":
return queryset.filter(execution_time__gte=self._slow_api_above)
if self.value() == "fast":
return queryset.filter(execution_time__lt=self._slow_api_above)

return queryset

@@ -81,66 +80,94 @@ class APILogsAdmin(admin.ModelAdmin, ExportCsvMixin):

def __init__(self, model, admin_site):
super().__init__(model, admin_site)
self._DRF_API_LOGGER_TIMEDELTA = 0
if hasattr(settings, 'DRF_API_LOGGER_SLOW_API_ABOVE'):
self._timedelta = 0
if hasattr(settings, "DRF_API_LOGGER_SLOW_API_ABOVE"):
if isinstance(settings.DRF_API_LOGGER_SLOW_API_ABOVE, int):  # Make sure the value is an integer.
self.list_filter += (SlowAPIsFilter,)
if hasattr(settings, 'DRF_API_LOGGER_TIMEDELTA'):
if hasattr(settings, "DRF_API_LOGGER_TIMEDELTA"):
if isinstance(settings.DRF_API_LOGGER_TIMEDELTA, int):  # Make sure the value is an integer.
self._DRF_API_LOGGER_TIMEDELTA = settings.DRF_API_LOGGER_TIMEDELTA
self._timedelta = settings.DRF_API_LOGGER_TIMEDELTA

def added_on_time(self, obj):
return (obj.added_on + timedelta(minutes=self._DRF_API_LOGGER_TIMEDELTA)).strftime("%d %b %Y %H:%M:%S")
def time_display(self, obj):
return (obj.timestamp + timedelta(minutes=self._timedelta)).strftime("%d %b %Y %H:%M:%S")

added_on_time.admin_order_field = 'added_on'
added_on_time.short_description = 'Added on'
time_display.admin_order_field = "timestamp"
time_display.short_description = _("Timestamp")

list_per_page = 20
list_display = ('id', 'api', 'method', 'status_code', 'execution_time', 'added_on_time',)
list_filter = ('added_on', 'status_code', 'method',)
search_fields = ('body', 'response', 'headers', 'api',)
list_display = (
"api",
"user",
"method",
"status_code",
"execution_time",
"time_display",
)
list_filter = (
"timestamp",
"status_code",
"method",
)
search_fields = (
"body",
"response",
"headers",
"api",
)
readonly_fields = (
'execution_time', 'client_ip_address', 'api',
'headers', 'body', 'method', 'response', 'status_code', 'added_on_time',
"user",
"execution_time",
"client_ip_address",
"api",
"headers",
"body",
"method",
"response",
"status_code",
"time_display",
)
exclude = ('added_on',)
exclude = ("timestamp",)

change_list_template = 'charts_change_list.html'
change_form_template = 'change_form.html'
date_hierarchy = 'added_on'
change_list_template = "charts_change_list.html"
change_form_template = "change_form.html"
date_hierarchy = "timestamp"

def changelist_view(self, request, extra_context=None):
response = super(APILogsAdmin, self).changelist_view(request, extra_context)
try:
filtered_query_set = response.context_data["cl"].queryset
except Exception:
return response
analytics_model = filtered_query_set.values('added_on__date').annotate(total=Count('id')).order_by('total')
status_code_count_mode = filtered_query_set.values('id').values('status_code').annotate(
total=Count('id')).order_by('status_code')
analytics_model = filtered_query_set.values("timestamp__date").annotate(total=Count("id")).order_by("total")
status_code_count_mode = (
filtered_query_set.values("id")
.values("status_code")
.annotate(total=Count("id"))
.order_by("status_code")
)
status_code_count_keys = list()
status_code_count_values = list()
for item in status_code_count_mode:
status_code_count_keys.append(item.get('status_code'))
status_code_count_values.append(item.get('total'))
status_code_count_keys.append(item.get("status_code"))
status_code_count_values.append(item.get("total"))
extra_context = dict(
analytics=analytics_model,
status_code_count_keys=status_code_count_keys,
status_code_count_values=status_code_count_values
status_code_count_values=status_code_count_values,
)
response.context_data.update(extra_context)
return response

def get_queryset(self, request):
drf_api_logger_default_database = 'default'
if hasattr(settings, 'DRF_API_LOGGER_DEFAULT_DATABASE'):
drf_api_logger_default_database = "default"
if hasattr(settings, "DRF_API_LOGGER_DEFAULT_DATABASE"):
drf_api_logger_default_database = settings.DRF_API_LOGGER_DEFAULT_DATABASE
return super(APILogsAdmin, self).get_queryset(request).using(drf_api_logger_default_database)

def changeform_view(self, request, object_id=None, form_url='', extra_context=None):
if request.GET.get('export', False):
drf_api_logger_default_database = 'default'
if hasattr(settings, 'DRF_API_LOGGER_DEFAULT_DATABASE'):
def changeform_view(self, request, object_id=None, form_url="", extra_context=None):
if request.GET.get("export", False):
drf_api_logger_default_database = "default"
if hasattr(settings, "DRF_API_LOGGER_DEFAULT_DATABASE"):
drf_api_logger_default_database = settings.DRF_API_LOGGER_DEFAULT_DATABASE
export_queryset = self.get_queryset(request).filter(pk=object_id).using(drf_api_logger_default_database)
return self.export_as_csv(request, export_queryset)
@@ -152,5 +179,4 @@ def has_add_permission(self, request, obj=None):
def has_change_permission(self, request, obj=None):
return False


admin.site.register(APILogsModel, APILogsAdmin)
admin.site.register(APILogs, APILogsAdmin)
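For reviewers trying this branch locally: the admin above only reads optional settings. A minimal settings.py sketch that exercises every new code path (the setting names come from this diff; the values are illustrative assumptions):

# settings.py (illustrative values, all optional)
DRF_API_LOGGER_SLOW_API_ABOVE = 200          # int, in milliseconds; enables the Slow/Fast list filter
DRF_API_LOGGER_TIMEDELTA = 330               # int, in minutes; offset added to displayed timestamps
DRF_API_LOGGER_DEFAULT_DATABASE = "default"  # database alias used for the queryset and CSV export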
4 changes: 3 additions & 1 deletion drf_api_logger/apps.py
@@ -1,6 +1,8 @@
from django.apps import AppConfig
from django.utils.translation import gettext_lazy as _


class LoggerConfig(AppConfig):
name = 'drf_api_logger'
verbose_name = 'DRF API Logger'
verbose_name = _('DRF API Logger')
verbose_name_plural = _('DRF API Logger')
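With verbose_name wrapped in gettext_lazy, the app label now resolves against the active locale at render time. A minimal sketch of that behaviour, assuming a compiled message catalog exists (e.g. produced with django-admin makemessages -l de and compilemessages) in a configured Django project:

from django.utils.translation import activate, gettext_lazy as _

label = _("DRF API Logger")  # lazy proxy; translation is deferred until rendering
activate("de")               # assumes a German catalog has been compiled
print(str(label))            # German translation if available, English fallback otherwise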
30 changes: 15 additions & 15 deletions drf_api_logger/insert_log_into_database.py
@@ -1,55 +1,55 @@
from queue import Queue
import time
from django.conf import settings
from threading import Thread
from django.conf import settings
from django.db.utils import OperationalError

from drf_api_logger.models import APILogsModel
from drf_api_logger.models import APILogs


class InsertLogIntoDatabase(Thread):

def __init__(self):
super().__init__()

self.DRF_API_LOGGER_DEFAULT_DATABASE = 'default'
self.default_database = 'default'
if hasattr(settings, 'DRF_API_LOGGER_DEFAULT_DATABASE'):
self.DRF_API_LOGGER_DEFAULT_DATABASE = settings.DRF_API_LOGGER_DEFAULT_DATABASE
self.default_database = settings.DRF_API_LOGGER_DEFAULT_DATABASE

self.DRF_LOGGER_QUEUE_MAX_SIZE = 50 # Default queue size 50
self.queue_max_size = 50 # Default queue size 50
if hasattr(settings, 'DRF_LOGGER_QUEUE_MAX_SIZE'):
self.DRF_LOGGER_QUEUE_MAX_SIZE = settings.DRF_LOGGER_QUEUE_MAX_SIZE
self.queue_max_size = settings.DRF_LOGGER_QUEUE_MAX_SIZE

if self.DRF_LOGGER_QUEUE_MAX_SIZE < 1:
if self.queue_max_size < 1:
raise Exception("""
DRF API LOGGER EXCEPTION
Value of DRF_LOGGER_QUEUE_MAX_SIZE must be greater than 0
""")

self.DRF_LOGGER_INTERVAL = 10 # Default DB insertion interval is 10 seconds.
self.interval = 10 # Default DB insertion interval is 10 seconds.
if hasattr(settings, 'DRF_LOGGER_INTERVAL'):
self.DRF_LOGGER_INTERVAL = settings.DRF_LOGGER_INTERVAL
self.interval = settings.DRF_LOGGER_INTERVAL

if self.DRF_LOGGER_INTERVAL < 1:
if self.interval < 1:
raise Exception("""
DRF API LOGGER EXCEPTION
Value of DRF_LOGGER_INTERVAL must be greater than 0
""")

self._queue = Queue(maxsize=self.DRF_LOGGER_QUEUE_MAX_SIZE)
self._queue = Queue(maxsize=self.queue_max_size)

def run(self) -> None:
self.start_queue_process()

def put_log_data(self, data):
self._queue.put(APILogsModel(**data))
self._queue.put(APILogs(**data))

if self._queue.qsize() >= self.DRF_LOGGER_QUEUE_MAX_SIZE:
if self._queue.qsize() >= self.queue_max_size:
self._start_bulk_insertion()

def start_queue_process(self):
while True:
time.sleep(self.DRF_LOGGER_INTERVAL)
time.sleep(self.interval)
self._start_bulk_insertion()

def _start_bulk_insertion(self):
@@ -61,7 +61,7 @@ def _start_bulk_insertion(self):

def _insert_into_data_base(self, bulk_item):
try:
APILogsModel.objects.using(self.DRF_API_LOGGER_DEFAULT_DATABASE).bulk_create(bulk_item)
APILogs.objects.using(self.default_database).bulk_create(bulk_item)
except OperationalError:
raise Exception("""
DRF API LOGGER EXCEPTION
(diff truncated)
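The renamed attributes mirror the same two optional settings as before; both values must be at least 1, per the checks in the constructor above. A short configuration sketch (illustrative values):

# settings.py (illustrative values)
DRF_LOGGER_QUEUE_MAX_SIZE = 50  # flush as soon as this many log entries are queued
DRF_LOGGER_INTERVAL = 10        # background flush interval, in seconds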