feature(Django Logging):

Added Django logging for the backend
Devoalda 2023-10-31 09:23:06 +08:00
parent dad2b6788b
commit aeb62c1a6b
4 changed files with 110 additions and 13 deletions

View File

@@ -19,7 +19,7 @@ services:
ports:
- "8000:8000" # Map container port 8000 to host port 8000
environment:
- DEBUG=True
- DEBUG=FALSE
- SECRET_KEY=A_RANDOM_SECRET_KEY
- ALLOWED_HOSTS=*
- CACHE=True
@@ -39,7 +39,7 @@ services:
ports:
- "3000:3000" # Map container port 3000 to host port 3000
environment:
- REACT_APP_API_HOST=localhost
- REACT_APP_API_HOST=http://127.0.0.1
- REACT_APP_API_PORT=8000
networks:
- dbnet
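
For context, these variables are read on the Django side through django-environ (the settings file in this commit builds an environ.Env instance). The snippet below is a hypothetical settings.py excerpt, not part of this diff, sketching one way the compose values could be consumed:

import environ

env = environ.Env(
    # casting and default for DEBUG are assumed; the real defaults sit outside the hunks shown here
    DEBUG=(bool, False),
)

DEBUG = env.bool('DEBUG', default=False)           # the compose file now passes DEBUG=FALSE
SECRET_KEY = env('SECRET_KEY')                     # A_RANDOM_SECRET_KEY placeholder from the compose file
ALLOWED_HOSTS = env('ALLOWED_HOSTS').split(',')    # '*' permits any host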

View File

@@ -14,6 +14,8 @@ from datetime import timedelta
from pathlib import Path
import environ
import os
import logging
import datetime
env = environ.Env(
# set casting, default value
@@ -103,6 +105,15 @@ CORS_ALLOW_ALL_ORIGINS = True
CORS_EXPOSE_HEADERS = [
'Content-Disposition',
'Content-Type',
'X-Forwarded-For',
'accept',
'accept-encoding',
'authorization',
'dnt',
'origin',
'user-agent',
'x-csrftoken',
'x-requested-with',
]
TEMPLATES = [
@@ -158,7 +169,6 @@ if env.bool('CACHE', default=False):
}
}
PASSWORD_HASHERS = [
"django.contrib.auth.hashers.Argon2PasswordHasher",
"django.contrib.auth.hashers.PBKDF2PasswordHasher",
@@ -191,7 +201,6 @@ MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
if not os.path.exists(MEDIA_ROOT):
os.makedirs(MEDIA_ROOT)
# Internationalization
# https://docs.djangoproject.com/en/4.2/topics/i18n/
@@ -203,7 +212,6 @@ USE_I18N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/4.2/howto/static-files/
@@ -213,3 +221,42 @@ STATIC_URL = 'static/'
# https://docs.djangoproject.com/en/4.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
# Logging
LOGGING_DIR = os.path.join(BASE_DIR, 'logs')
# Ensure the log directory exists
if not os.path.exists(LOGGING_DIR):
os.makedirs(LOGGING_DIR)
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'verbose': {
'format': '{levelname} {asctime} {module} {message}',
'style': '{',
},
},
'handlers': {
'file': {
'level': 'DEBUG',
'class': 'logging.FileHandler',
'filename': os.path.join(LOGGING_DIR,
'django.' + datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S") + '.log'),
'formatter': 'verbose',
},
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler', # Console handler
'formatter': 'verbose',
},
},
'loggers': {
'django': {
'handlers': ['file', 'console'], # Include both 'file' and 'console' handlers
'level': 'DEBUG',
'propagate': True,
},
},
}
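
With this configuration, records sent to the 'django' logger, or to any child logger such as 'django.request', are written both to the console and to a timestamped file under logs/. A minimal sketch of emitting records against it from application code, assuming Django settings are already loaded (the child-logger name is illustrative):

import logging

# Children of the 'django' logger propagate to the file and console handlers configured above.
logger = logging.getLogger('django.safeshare')   # illustrative name, not from this commit
logger.info('Upload received')                   # goes to the console and to django.<timestamp>.log
logger.error('Virus scan failed')                # anything at DEBUG or above is captured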

View File

@@ -1,13 +1,14 @@
import os
import threading
import environ
import logging
import redis
from django.conf import settings
class TrashCollector:
def __init__(self):
self.logger = logging.getLogger(__name__)
self.stop_event = threading.Event()
self.thread = threading.Thread(target=self.run)
self.media_root = settings.MEDIA_ROOT
@@ -39,6 +40,7 @@ class TrashCollector:
file_path = os.path.join(self.media_root, file)
try:
if os.path.isfile(file_path):
self.logger.info(f"Deleting file {file_path}")
print(f"Deleting file {file_path}")
os.unlink(file_path)
except Exception as e:
@@ -60,6 +62,7 @@ class TrashCollector:
file_path = os.path.join(self.media_root, key.decode("utf-8"))
try:
if os.path.isfile(file_path):
self.logger.info(f"Deleting file {file_path}")
print(f"Deleting file {file_path}")
os.unlink(file_path)
except Exception as e:
@@ -71,10 +74,10 @@ class TrashCollector:
if __name__ == '__main__':
trash_collector = TrashCollector()
logger = logging.getLogger(__name__)
try:
print("Starting trash collector")
trash_collector.start()
print("Trash collector started")
logger.info("Trash collector started")
except KeyboardInterrupt:
trash_collector.stop()
print("Trash collector stopped")
logger.info("Trash collector stopped")

View File

@@ -3,6 +3,7 @@ import os
import sys
import threading
import uuid
import logging
from urllib.parse import quote
import magic
@@ -17,6 +18,8 @@ sys.path.append(os.path.dirname(os.path.abspath(__file__)) + "/../utils/safeshar
import client
logger = logging.getLogger(__name__)
class ManageItemsView(APIView):
TIMEOUT = 5
@@ -35,6 +38,9 @@ class ManageItemsView(APIView):
responses = []
threads = []
client_ip = get_client_ip(request)
logger.info(f"{request.method} request received from IP: {client_ip}")
for file in files:
thread = threading.Thread(target=self._save_file, args=(file, ttl, responses))
threads.append(thread)
@@ -53,6 +59,7 @@ class ManageItemsView(APIView):
if not timeout_event.is_set():
return Response(responses, status=201)
else:
logger.error('File saving timed out')
return Response({'msg': 'File saving timed out'}, status=500)
finally:
timeout_timer.cancel()
@@ -69,6 +76,7 @@ class ManageItemsView(APIView):
destination.write(chunk)
hash_signature = hasher.hexdigest()
logger.info(f'File {filename} saved to {save_path} with hash signature {hash_signature}')
try:
grpc_client = client.Client()
@@ -82,11 +90,16 @@ class ManageItemsView(APIView):
}
os.remove(save_path)
responses.append(response)
logger.warning(f'File {filename} is infected with a virus')
return
# Determine the MIME type of the file using python-magic
try:
file_type = magic.Magic()
mime_type = file_type.from_file(save_path)
except Exception as e:
logger.warning(f'Error detecting MIME type: {str(e)}')
mime_type = 'application/octet-stream'
# Store the file path, filename, MIME type, and other information in the cache
cache.set(key, {
@@ -102,6 +115,7 @@ class ManageItemsView(APIView):
'msg': f"{key} successfully set to {filename} with TTL {ttl} seconds",
}
responses.append(response)
logger.info(f'File {filename} successfully saved to cache with key {key} and TTL {ttl} seconds')
class ManageItemView(APIView):
@@ -109,14 +123,17 @@ class ManageItemView(APIView):
value = cache.get(key)
if not value:
logger.warning(f'Key {key} not found')
raise NotFound("Key not found")
if 'path' not in value:
logger.warning(f'File not found')
raise NotFound("File not found")
file_path = value['path']
if not os.path.exists(file_path):
logger.warning(f'File not found')
raise NotFound("File not found")
with open(file_path, 'rb') as f:
@@ -130,17 +147,47 @@ class ManageItemView(APIView):
# Set the Content-Disposition with the original filename
response['Content-Disposition'] = f'attachment; filename="{quote(os.path.basename(file_path))}"'
logger.info(f'File {file_path} successfully retrieved from cache with key {key}')
return response
def delete(self, request, key):
value = cache.get(key)
if not value:
logger.warning(f'Key {key} not found')
return Response({'msg': 'Not found'}, status=404)
if 'path' in value and os.path.exists(value['path']):
os.remove(value['path'])
cache.delete(key)
logger.info(f'File {value["path"]} successfully deleted from cache with key {key}')
return Response({'msg': f"{key} successfully deleted"}, status=200)
logger.warning(f'File not found')
return Response({'msg': 'File not found'}, status=404)
PRIVATE_IPS_PREFIX = ('10.', '172.', '192.')
def get_client_ip(request):
"""get the client ip from the request
"""
# remote_address = request.META.get('REMOTE_ADDR')
remote_address = request.META.get('HTTP_X_FORWARDED_FOR') or request.META.get('REMOTE_ADDR')
# default the ip to the forwarded header if present, otherwise REMOTE_ADDR (or None if neither is set)
ip = remote_address
# try to get the first non-proxy ip (not a private ip) from the
# HTTP_X_FORWARDED_FOR
x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
if x_forwarded_for:
proxies = x_forwarded_for.split(',')
# remove the private ips from the beginning
while len(proxies) > 0 and proxies[0].startswith(PRIVATE_IPS_PREFIX):
proxies.pop(0)
# take the first ip which is not a private one (of a proxy)
if len(proxies) > 0:
ip = proxies[0]
print(ip)
return ip
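
As a quick sanity check of the forwarded-IP handling, something along these lines could be run in a Django shell; the import path for get_client_ip is a guess and would need to match the actual views module:

from django.test import RequestFactory
# Hypothetical import path; point it at the module that defines get_client_ip.
from safeshare_app.views import get_client_ip

rf = RequestFactory()
# Simulate a request forwarded through a private 10.x proxy.
request = rf.get('/', HTTP_X_FORWARDED_FOR='10.0.0.5,203.0.113.7')
print(get_client_ip(request))  # expected: 203.0.113.7, the first non-private hop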