feature(Django Logging): Added Django Logging for backend

parent dad2b6788b
commit aeb62c1a6b
@@ -19,7 +19,7 @@ services:
     ports:
       - "8000:8000" # Map container port 8000 to host port 8000
     environment:
-      - DEBUG=True
+      - DEBUG=FALSE
       - SECRET_KEY=A_RANDOM_SECRET_KEY
       - ALLOWED_HOSTS=*
       - CACHE=True
@@ -39,7 +39,7 @@ services:
     ports:
       - "3000:3000" # Map container port 3000 to host port 3000
     environment:
-      - REACT_APP_API_HOST=localhost
+      - REACT_APP_API_HOST=http://127.0.0.1
       - REACT_APP_API_PORT=8000
     networks:
       - dbnet
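For context on the environment changes above: the backend reads these compose variables through django-environ, and a later hunk already shows env.bool('CACHE', default=False). A minimal sketch of how the strings could be cast in settings.py follows; the exact lines and defaults are assumptions, not part of the diff.

import environ

env = environ.Env(
    # assumed casting map: "FALSE" / "True" strings become real booleans
    DEBUG=(bool, False),
    CACHE=(bool, False),
)

DEBUG = env.bool('DEBUG', default=False)
SECRET_KEY = env('SECRET_KEY', default='A_RANDOM_SECRET_KEY')
ALLOWED_HOSTS = env('ALLOWED_HOSTS', default='*').split(',')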
@@ -14,6 +14,8 @@ from datetime import timedelta
 from pathlib import Path
 import environ
 import os
+import logging
+import datetime
 
 env = environ.Env(
     # set casting, default value
@@ -103,6 +105,15 @@ CORS_ALLOW_ALL_ORIGINS = True
 CORS_EXPOSE_HEADERS = [
     'Content-Disposition',
     'Content-Type',
+    'X-Forwarded-For',
+    'accept',
+    'accept-encoding',
+    'authorization',
+    'dnt',
+    'origin',
+    'user-agent',
+    'x-csrftoken',
+    'x-requested-with',
 ]
 
 TEMPLATES = [
@@ -158,7 +169,6 @@ if env.bool('CACHE', default=False):
         }
     }
 
-
 PASSWORD_HASHERS = [
     "django.contrib.auth.hashers.Argon2PasswordHasher",
     "django.contrib.auth.hashers.PBKDF2PasswordHasher",
@@ -191,7 +201,6 @@ MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
 if not os.path.exists(MEDIA_ROOT):
     os.makedirs(MEDIA_ROOT)
 
-
 # Internationalization
 # https://docs.djangoproject.com/en/4.2/topics/i18n/
 
@@ -203,7 +212,6 @@ USE_I18N = True
 
 USE_TZ = True
 
-
 # Static files (CSS, JavaScript, Images)
 # https://docs.djangoproject.com/en/4.2/howto/static-files/
 
@@ -213,3 +221,42 @@ STATIC_URL = 'static/'
 # https://docs.djangoproject.com/en/4.2/ref/settings/#default-auto-field
 
 DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
+
+# Logging
+LOGGING_DIR = os.path.join(BASE_DIR, 'logs')
+
+# Ensure the log directory exists
+if not os.path.exists(LOGGING_DIR):
+    os.makedirs(LOGGING_DIR)
+
+LOGGING = {
+    'version': 1,
+    'disable_existing_loggers': False,
+    'formatters': {
+        'verbose': {
+            'format': '{levelname} {asctime} {module} {message}',
+            'style': '{',
+        },
+    },
+    'handlers': {
+        'file': {
+            'level': 'DEBUG',
+            'class': 'logging.FileHandler',
+            'filename': os.path.join(LOGGING_DIR,
+                'django.' + datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S") + '.log'),
+            'formatter': 'verbose',
+        },
+        'console': {
+            'level': 'DEBUG',
+            'class': 'logging.StreamHandler',  # Console handler
+            'formatter': 'verbose',
+        },
+    },
+    'loggers': {
+        'django': {
+            'handlers': ['file', 'console'],  # Include both 'file' and 'console' handlers
+            'level': 'DEBUG',
+            'propagate': True,
+        },
+    },
+}
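Two notes on the LOGGING block above, plus an illustrative snippet (logger name and message are assumptions). The filename is built with datetime.datetime.now() when settings load, so each process start opens one new timestamped file under logs/. Only the 'django' logger is declared here, so records reach the file and console handlers when emitted on 'django' or one of its children; loggers created with getLogger(__name__) outside that namespace would need their own entry or a root logger to use these handlers.

import logging

# Any logger in the 'django' namespace propagates up to the handlers configured above.
logger = logging.getLogger('django')
logger.info("upload finished")
# With the 'verbose' formatter this renders roughly as:
#   INFO <asctime> <calling module> upload finished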
@@ -1,13 +1,14 @@
 import os
 import threading
-import environ
+import logging
 import redis
 from django.conf import settings
 
 
 class TrashCollector:
     def __init__(self):
+        self.logger = logging.getLogger(__name__)
 
         self.stop_event = threading.Event()
         self.thread = threading.Thread(target=self.run)
         self.media_root = settings.MEDIA_ROOT
@@ -39,6 +40,7 @@ class TrashCollector:
             file_path = os.path.join(self.media_root, file)
             try:
                 if os.path.isfile(file_path):
+                    self.logger.info(f"Deleting file {file_path}")
                     print(f"Deleting file {file_path}")
                     os.unlink(file_path)
             except Exception as e:
@@ -60,6 +62,7 @@ class TrashCollector:
             file_path = os.path.join(self.media_root, key.decode("utf-8"))
             try:
                 if os.path.isfile(file_path):
+                    self.logger.info(f"Deleting file {file_path}")
                     print(f"Deleting file {file_path}")
                     os.unlink(file_path)
             except Exception as e:
@@ -71,10 +74,10 @@ class TrashCollector:
 
 if __name__ == '__main__':
     trash_collector = TrashCollector()
+    logger = logging.getLogger(__name__)
     try:
-        print("Starting trash collector")
         trash_collector.start()
-        print("Trash collector started")
+        logger.info("Trash collector started")
     except KeyboardInterrupt:
         trash_collector.stop()
-        print("Trash collector stopped")
+        logger.info("Trash collector stopped")
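One caveat on the standalone entry point above, with a sketch that is an assumption rather than part of the diff: when the collector runs as its own process and nothing applies the Django LOGGING config, the getLogger(__name__) calls fall back to logging's last-resort stderr handler, which only emits WARNING and above, so the logger.info(...) lines would be silent. A basicConfig call before start() gives them a destination:

import logging

# Assumed wiring for standalone runs only; inside the Django process the
# LOGGING dictConfig from settings.py already provides handlers.
logging.basicConfig(
    level=logging.INFO,
    format="%(levelname)s %(asctime)s %(module)s %(message)s",
)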
@@ -3,6 +3,7 @@ import os
 import sys
 import threading
 import uuid
+import logging
 from urllib.parse import quote
 
 import magic
@@ -17,6 +18,8 @@ sys.path.append(os.path.dirname(os.path.abspath(__file__)) + "/../utils/safeshar
 
 import client
 
+logger = logging.getLogger(__name__)
+
 
 class ManageItemsView(APIView):
     TIMEOUT = 5
|
@ -35,6 +38,9 @@ class ManageItemsView(APIView):
|
||||||
responses = []
|
responses = []
|
||||||
threads = []
|
threads = []
|
||||||
|
|
||||||
|
client_ip = get_client_ip(request)
|
||||||
|
logger.info(f"{request.method} request received from IP: {client_ip}")
|
||||||
|
|
||||||
for file in files:
|
for file in files:
|
||||||
thread = threading.Thread(target=self._save_file, args=(file, ttl, responses))
|
thread = threading.Thread(target=self._save_file, args=(file, ttl, responses))
|
||||||
threads.append(thread)
|
threads.append(thread)
|
||||||
|
@@ -53,6 +59,7 @@ class ManageItemsView(APIView):
             if not timeout_event.is_set():
                 return Response(responses, status=201)
             else:
+                logger.error('File saving timed out')
                 return Response({'msg': 'File saving timed out'}, status=500)
         finally:
             timeout_timer.cancel()
@@ -69,6 +76,7 @@ class ManageItemsView(APIView):
                 destination.write(chunk)
 
         hash_signature = hasher.hexdigest()
+        logger.info(f'File {filename} saved to {save_path} with hash signature {hash_signature}')
 
         try:
             grpc_client = client.Client()
@@ -82,11 +90,16 @@ class ManageItemsView(APIView):
             }
             os.remove(save_path)
             responses.append(response)
+            logger.warning(f'File {filename} is infected with a virus')
             return
 
         # Determine the MIME type of the file using python-magic
-        file_type = magic.Magic()
-        mime_type = file_type.from_file(save_path)
+        try:
+            file_type = magic.Magic()
+            mime_type = file_type.from_file(save_path)
+        except Exception as e:
+            logger.warning(f'Error detecting MIME type: {str(e)}')
+            mime_type = 'application/octet-stream'
 
         # Store the file path, filename, MIME type, and other information in the cache
         cache.set(key, {
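A note on the new MIME-type fallback above, with an illustrative snippet (the file name is made up): in the python-magic bindings, magic.Magic() returns a human-readable description string, while magic.Magic(mime=True) returns a MIME type string such as 'application/pdf', which matches the shape of the 'application/octet-stream' fallback.

import magic

# Illustrative only; outputs depend on the local libmagic database.
print(magic.Magic().from_file("example.pdf"))           # e.g. "PDF document, version 1.7"
print(magic.Magic(mime=True).from_file("example.pdf"))  # e.g. "application/pdf"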
@@ -102,6 +115,7 @@ class ManageItemsView(APIView):
             'msg': f"{key} successfully set to {filename} with TTL {ttl} seconds",
         }
         responses.append(response)
+        logger.info(f'File {filename} successfully saved to cache with key {key} and TTL {ttl} seconds')
 
 
 class ManageItemView(APIView):
|
@ -109,14 +123,17 @@ class ManageItemView(APIView):
|
||||||
value = cache.get(key)
|
value = cache.get(key)
|
||||||
|
|
||||||
if not value:
|
if not value:
|
||||||
|
logger.warning(f'Key {key} not found')
|
||||||
raise NotFound("Key not found")
|
raise NotFound("Key not found")
|
||||||
|
|
||||||
if 'path' not in value:
|
if 'path' not in value:
|
||||||
|
logger.warning(f'File not found')
|
||||||
raise NotFound("File not found")
|
raise NotFound("File not found")
|
||||||
|
|
||||||
file_path = value['path']
|
file_path = value['path']
|
||||||
|
|
||||||
if not os.path.exists(file_path):
|
if not os.path.exists(file_path):
|
||||||
|
logger.warning(f'File not found')
|
||||||
raise NotFound("File not found")
|
raise NotFound("File not found")
|
||||||
|
|
||||||
with open(file_path, 'rb') as f:
|
with open(file_path, 'rb') as f:
|
||||||
|
@@ -130,17 +147,47 @@ class ManageItemView(APIView):
         # Set the Content-Disposition with the original filename
         response['Content-Disposition'] = f'attachment; filename="{quote(os.path.basename(file_path))}"'
 
+        logger.info(f'File {file_path} successfully retrieved from cache with key {key}')
         return response
 
     def delete(self, request, key):
         value = cache.get(key)
 
         if not value:
+            logger.warning(f'Key {key} not found')
             return Response({'msg': 'Not found'}, status=404)
 
         if 'path' in value and os.path.exists(value['path']):
            os.remove(value['path'])
            cache.delete(key)
+           logger.info(f'File {value["path"]} successfully deleted from cache with key {key}')
            return Response({'msg': f"{key} successfully deleted"}, status=200)
 
+        logger.warning(f'File not found')
         return Response({'msg': 'File not found'}, status=404)
+
+
+PRIVATE_IPS_PREFIX = ('10.', '172.', '192.')
+
+
+def get_client_ip(request):
+    """get the client ip from the request
+    """
+    # remote_address = request.META.get('REMOTE_ADDR')
+    remote_address = request.META.get('HTTP_X_FORWARDED_FOR') or request.META.get('REMOTE_ADDR')
+    # set the default value of the ip to be the REMOTE_ADDR if available
+    # else None
+    ip = remote_address
+    # try to get the first non-proxy ip (not a private ip) from the
+    # HTTP_X_FORWARDED_FOR
+    x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
+    if x_forwarded_for:
+        proxies = x_forwarded_for.split(',')
+        # remove the private ips from the beginning
+        while len(proxies) > 0 and proxies[0].startswith(PRIVATE_IPS_PREFIX):
+            proxies.pop(0)
+        # take the first ip which is not a private one (of a proxy)
+        if len(proxies) > 0:
+            ip = proxies[0]
+    print(ip)
+    return ip
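To make the forwarding logic concrete, a usage sketch for the new get_client_ip helper; the fake request and the addresses are assumptions, and the snippet presumes get_client_ip from the view module above is in scope.

class FakeRequest:
    # Mimics the small slice of request.META that get_client_ip reads.
    META = {
        'REMOTE_ADDR': '10.0.0.2',
        'HTTP_X_FORWARDED_FOR': '192.168.1.10,203.0.113.7,198.51.100.23',
    }

# The leading private hop (192.168.1.10 matches the '192.' prefix) is popped,
# then the first remaining address is taken.
client_ip = get_client_ip(FakeRequest())  # '203.0.113.7'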