Multiple device support log
This commit is contained in:
parent: b4344e3dbd
commit: 889d7471b7
Binary files not shown.
@@ -0,0 +1,46 @@
# Generated by Django 5.1.3 on 2024-12-10 09:51

import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('Accounts', '0003_userprofile_company_name'),
        ('Dashboard', '0005_restoredatabase'),
        ('Device', '0007_devices_mac_address_devices_unique_id'),
    ]

    operations = [
        migrations.CreateModel(
            name='DdosPrediction',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('file_path', models.FileField(upload_to='ddos_predictions/')),
                ('uploaded_at', models.DateTimeField(auto_now_add=True)),
                ('device', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Device.devices')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Accounts.userprofile')),
            ],
        ),
        migrations.CreateModel(
            name='Rensomware_AuditPrediction',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('file_path', models.FileField(max_length=555, upload_to='ransomware_predictions/')),
                ('uploaded_at', models.DateTimeField(auto_now_add=True)),
                ('device', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Device.devices')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Accounts.userprofile')),
            ],
        ),
        migrations.CreateModel(
            name='Rensomware_TypePrediction',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('file_path', models.FileField(max_length=555, upload_to='ransomware_predictions/')),
                ('uploaded_at', models.DateTimeField(auto_now_add=True)),
                ('device', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Device.devices')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Accounts.userprofile')),
            ],
        ),
    ]
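Note: once a migration file like the one above is generated, it is applied with Django's standard migrate step. A minimal sketch, assuming DJANGO_SETTINGS_MODULE points at the project settings (e.g. x_sys.settings):

    # Equivalent to running `python manage.py migrate Dashboard` from the project root.
    import django
    from django.core.management import call_command

    django.setup()
    call_command('migrate', 'Dashboard')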
Binary file not shown.
@@ -1,5 +1,7 @@
from django.db import models
from Accounts.models import UserProfile
from Device.models import Devices

# Create your models here.
class Status(models.Model):
    number = models.CharField(max_length=15)
@@ -23,4 +25,35 @@ class RestoreDatabase(models.Model):
    value = models.CharField(max_length=15, default="0")

    def __str__(self):
        return f"Number (ID: {self.id}, Status: {self.value})"


class DdosPrediction(models.Model):
    device = models.ForeignKey(Devices, on_delete=models.CASCADE)
    user = models.ForeignKey(UserProfile, on_delete=models.CASCADE)  # Add this field to reference the user
    file_path = models.FileField(upload_to='ddos_predictions/')
    uploaded_at = models.DateTimeField(auto_now_add=True)

    def __str__(self):
        return f"Prediction for {self.device.device_name} by {self.user.user.username} at {self.uploaded_at}"


class Rensomware_TypePrediction(models.Model):
    device = models.ForeignKey(Devices, on_delete=models.CASCADE)
    user = models.ForeignKey(UserProfile, on_delete=models.CASCADE)  # Add this field to reference the user
    file_path = models.FileField(upload_to='ransomware_predictions/', max_length=555)
    uploaded_at = models.DateTimeField(auto_now_add=True)

    def __str__(self):
        return f"Prediction for {self.device.device_name} by {self.user.user.username} at {self.uploaded_at}"


class Rensomware_AuditPrediction(models.Model):
    device = models.ForeignKey(Devices, on_delete=models.CASCADE)
    user = models.ForeignKey(UserProfile, on_delete=models.CASCADE)  # Add this field to reference the user
    file_path = models.FileField(upload_to='ransomware_predictions/', max_length=555)
    uploaded_at = models.DateTimeField(auto_now_add=True)

    def __str__(self):
        return f"Prediction for {self.device.device_name} by {self.user.user.username} at {self.uploaded_at}"
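Note: a minimal sketch of creating and reading back one of the new per-device prediction rows, assuming these models live in the Dashboard app (as the migration above suggests) and that a Devices row is already linked to a UserProfile via used_by; object names below are illustrative only:

    from Accounts.models import UserProfile
    from Device.models import Devices
    from Dashboard.models import DdosPrediction   # assumed import path

    profile = UserProfile.objects.first()
    device = Devices.objects.filter(used_by=profile).order_by('-id').first()

    prediction = DdosPrediction.objects.create(
        device=device,
        user=profile,
        file_path='ddos_predictions/sample.csv',   # a FileField also accepts an UploadedFile
    )
    print(prediction)   # -> "Prediction for <device_name> by <username> at <uploaded_at>"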
@@ -47,7 +47,7 @@ urlpatterns = [
    # DDoS

    path('generate/', generate_random_values, name='generate_random_values'),
-    path('fetch_ddos_value/', fetch_ddos_value, name='fetch_ddos_value'),
+    path('ddos/fetch_ddos_value/', fetch_ddos_value, name='fetch_ddos_value'),

    #shadow script
    path('status1/', views.get_number_status1, name='get_number_status1'),
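Note: the route keeps its name, so callers that resolve it by name are unaffected by the new ddos/ prefix. A minimal sketch:

    from django.urls import reverse

    url = reverse('fetch_ddos_value')   # now resolves to a path ending in 'ddos/fetch_ddos_value/'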
File diff suppressed because it is too large.
Binary files not shown.
BIN  helpdesk/management/__pycache__/__init__.cpython-310.pyc (new file)
Binary files not shown.
malware/migrations/0003_malwarepredictionsdevice.py (new file, 26 lines)
@@ -0,0 +1,26 @@
# Generated by Django 5.1.3 on 2024-12-10 09:51

import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('Accounts', '0003_userprofile_company_name'),
        ('Device', '0007_devices_mac_address_devices_unique_id'),
        ('malware', '0002_malwareprediction_model_type'),
    ]

    operations = [
        migrations.CreateModel(
            name='MalwarePredictionsDevice',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('file_path', models.FileField(upload_to='malware_predictions/')),
                ('uploaded_at', models.DateTimeField(auto_now_add=True)),
                ('device', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='Device.devices')),
                ('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='Accounts.userprofile')),
            ],
        ),
    ]
Binary file not shown.
@@ -1,4 +1,6 @@
from django.db import models
from Device.models import Devices
from Accounts.models import UserProfile

# Create your models here.

@@ -19,3 +21,13 @@ class MalwarePrediction(models.Model):
    def __str__(self):
        return f"{self.process_name} - {self.predicted_malware} - {self.get_model_type_display()}"


class MalwarePredictionsDevice(models.Model):
    device = models.ForeignKey(Devices, on_delete=models.CASCADE, null=True)
    user = models.ForeignKey(UserProfile, on_delete=models.CASCADE, null=True)  # Add this field to reference the user
    file_path = models.FileField(upload_to='malware_predictions/')
    uploaded_at = models.DateTimeField(auto_now_add=True)

    def __str__(self):
        return f"Prediction for {self.device.device_name} by {self.user.user.username} at {self.uploaded_at}"
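Note: a minimal sketch of filtering the new per-device upload records, assuming the model above lives in malware/models.py (as the migration suggests); the device id is illustrative:

    from malware.models import MalwarePredictionsDevice

    some_device_id = 1   # illustrative
    latest_upload = (
        MalwarePredictionsDevice.objects
        .filter(device_id=some_device_id)
        .order_by('-uploaded_at')
        .first()
    )
    if latest_upload:
        print(latest_upload.file_path.name, latest_upload.uploaded_at)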
malware/views.py (693 lines changed)
@@ -18,12 +18,386 @@ from django.core.files.storage import default_storage
from rest_framework.parsers import MultiPartParser
from django.conf import settings
from django.http import HttpResponse
-from .models import MalwarePrediction
+from .models import MalwarePrediction, MalwarePredictionsDevice
from .serializers import MalwarePredictionSerializer
from Device.models import Devices
from Accounts.models import UserProfile
from django.utils import timezone
from django.http import JsonResponse
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt


class MalwarePredictionAPIView(APIView):
    parser_classes = [MultiPartParser]  # To handle file uploads

    @staticmethod
    def get_device_ids_by_user_id(user_id):
        try:
            # Get the UserProfile instance using the user ID
            user_profile = UserProfile.objects.get(user__id=user_id)
            print('user_profile', user_profile)

            # Retrieve all Devices associated with this UserProfile
            devices = Devices.objects.filter(used_by=user_profile)
            print('devices', devices)

            # Get the device IDs
            device_ids = [device.id for device in devices]
            return device_ids
        except UserProfile.DoesNotExist:
            return []

    def post(self, request, *args, **kwargs):
        if 'csv_file' not in request.FILES:
            return Response({"error": "No file provided"}, status=status.HTTP_400_BAD_REQUEST)

        csv_file = request.FILES['csv_file']
        user_id = request.data.get('user_id')
        if not user_id:
            return Response({"error": "User ID is required"}, status=status.HTTP_400_BAD_REQUEST)

        device_ids = self.get_device_ids_by_user_id(user_id)
        print(f"Device IDs: {device_ids}")
        if not device_ids:
            return Response({'error': 'No devices associated with the given user ID'}, status=status.HTTP_400_BAD_REQUEST)

        try:
            # device = Devices.objects.filter(id__in=device_ids).order_by('-created_at').first()  # Use the first device ID

            # Get the most recent device associated with the user
            device = Devices.objects.get(id=device_ids[-1])
            print(f"Device ID: {device.id}")
        except Devices.DoesNotExist:
            return Response({"error": "Device not found for the given device ID"}, status=status.HTTP_400_BAD_REQUEST)

        try:
            # Define the temp directory path
            temp_dir = os.path.join(settings.MEDIA_ROOT, 'malware_predictions', str(device.id))

            # Create the 'temp' directory if it doesn't exist
            if not os.path.exists(temp_dir):
                os.makedirs(temp_dir)

            # Save the file temporarily
            temp_file_path = os.path.join(temp_dir, csv_file.name)
            with default_storage.open(temp_file_path, 'wb+') as destination:
                for chunk in csv_file.chunks():
                    destination.write(chunk)

            # Read the CSV file with headers
            df = pd.read_csv(temp_file_path)

            # Extract column names from the CSV
            actual_columns = df.columns.tolist()

        except Exception as e:
            return Response({"error": "Could not read the CSV file", "details": str(e)}, status=status.HTTP_400_BAD_REQUEST)

        # Define the expected column names
        expected_columns = ['process_name', 'class', 'probability_of_malware']

        # Mapping logic
        if actual_columns != expected_columns:
            # Map actual column names to expected ones
            column_mapping = dict(zip(actual_columns, expected_columns))
            df.rename(columns=column_mapping, inplace=True)

        # Continue with processing the dataframe...
        records_saved = 0
        for _, row in df.iterrows():
            process_name = row['process_name']
            process_class = row['class']
            probability = row['probability_of_malware']

            MalwarePrediction.objects.create(
                process_name=process_name,
                process_class=process_class,
                probability_of_malware=probability,
            )
            records_saved += 1
            # print(data_sent,"dataaaaaa")

        return Response({"message": f"{records_saved} records saved successfully!"}, status=status.HTTP_201_CREATED)

    def get(self, request, *args, **kwargs):
        # Query all MalwarePrediction records from the database
        predictions = MalwarePrediction.objects.all()

        if not predictions.exists():
            return Response({"error": "No data available to generate graph."}, status=status.HTTP_404_NOT_FOUND)

        # Create a DataFrame from the queryset
        data = {
            'process_name': [p.process_name for p in predictions],
            'class': [p.process_class for p in predictions],
            'probability_of_malware': [p.probability_of_malware for p in predictions]
        }
        df = pd.DataFrame(data)

        # Plot using seaborn or matplotlib
        plt.figure(figsize=(10, 6))

        # Create a barplot where the class is on the x-axis and the probability is on the y-axis
        sns.barplot(
            data=df,
            x='class',                    # Independent variable (x-axis)
            y='probability_of_malware',   # Dependent variable (y-axis)
            ci=None,                      # No confidence intervals
            palette='Set2'                # Use a color palette for different classes
        )

        plt.title('Malware Probability by Class')
        plt.xlabel('Class')
        plt.ylabel('Probability of Malware')
        plt.tight_layout()

        # Save the plot to a bytes buffer
        buf = io.BytesIO()
        plt.savefig(buf, format='png')
        buf.seek(0)

        # Return the image as a response
        return HttpResponse(buf, content_type='image/png')


# class MalwarePredictionAPIView(APIView):
#     parser_classes = [MultiPartParser]  # To handle file uploads

#     @staticmethod
#     def get_device_ids_by_user_id(user_id):
#         try:
#             # Get the UserProfile instance using the user ID
#             user_profile = UserProfile.objects.get(user__id=user_id)
#             print('user_profile', user_profile)

#             # Retrieve all Devices associated with this UserProfile
#             devices = Devices.objects.filter(used_by=user_profile)
#             print('devices', devices)

#             # Get the device IDs
#             device_ids = [device.id for device in devices]
#             return device_ids
#         except UserProfile.DoesNotExist:
#             return []

#     def post(self, request, *args, **kwargs):
#         if 'csv_file' not in request.FILES:
#             return Response({"error": "No file provided"}, status=status.HTTP_400_BAD_REQUEST)

#         csv_file = request.FILES['csv_file']

#         # Retrieve user ID from the request
#         user_id = request.data.get('user_id')  # Ensure 'user_id' is being sent in the request body
#         print("user_id ", user_id)

#         if not user_id:
#             return Response({"error": "User ID is required"}, status=status.HTTP_400_BAD_REQUEST)

#         # Get the device IDs associated with the user
#         device_ids = self.get_device_ids_by_user_id(user_id)
#         print(f"Device IDs: {device_ids}")

#         # Fetch the first associated device for the user
#         if not device_ids:
#             return Response({'error': 'No devices associated with the given user ID'}, status=status.HTTP_400_BAD_REQUEST)

#         try:
#             # device = Devices.objects.filter(id__in=device_ids).order_by('-created_at').first()  # Use the first device ID
#             device = Devices.objects.filter(used_by__user=request.user).order_by('-id').first()
#             print(f"Device ID: {device.id}")
#         except Devices.DoesNotExist:
#             return Response({"error": "Device not found for the given device ID"}, status=status.HTTP_400_BAD_REQUEST)

#         try:
#             user_profile = UserProfile.objects.get(user__id=user_id)
#             print(user_profile)
#         except UserProfile.DoesNotExist:
#             return Response({"error": "User profile not found"}, status=status.HTTP_400_BAD_REQUEST)

#         try:
#             # Define the temp directory path using the device ID
#             temp_dir = os.path.join(settings.MEDIA_ROOT, 'malware_predictions')

#             # Create the temp directory if it doesn't exist
#             if not os.path.exists(temp_dir):
#                 os.makedirs(temp_dir)

#             # Save the file temporarily
#             temp_file_path = os.path.join(temp_dir, csv_file.name)
#             with default_storage.open(temp_file_path, 'wb+') as destination:
#                 for chunk in csv_file.chunks():
#                     destination.write(chunk)

#             # Read the CSV file with headers
#             df = pd.read_csv(temp_file_path)

#             # Extract column names from the CSV
#             actual_columns = df.columns.tolist()
#         except Exception as e:
#             return Response({"error": "Could not read the CSV file", "details": str(e)}, status=status.HTTP_400_BAD_REQUEST)

#         # Define the expected column names
#         expected_columns = ['process_name', 'class', 'probability_of_malware']

#         # Mapping logic
#         if actual_columns != expected_columns:
#             # Map actual column names to expected ones
#             column_mapping = dict(zip(actual_columns, expected_columns))
#             df.rename(columns=column_mapping, inplace=True)

#         # Save the data to the database
#         records_saved = 0
#         for _, row in df.iterrows():
#             try:
#                 process_name = row['process_name']
#                 process_class = row['class']
#                 probability = float(row['probability_of_malware'])  # Ensure it's a number
#             except ValueError:
#                 return Response({
#                     "error": f"Invalid value in 'probability_of_malware': {row['probability_of_malware']}"
#                 }, status=status.HTTP_400_BAD_REQUEST)

#             # MalwarePredictionsDevice.objects.create(
#             #     device=device,  # Pass the Devices instance here
#             #     user=user_profile,  # This will reference the user related to the device
#             #     file_path=temp_file_path,  # The path to the uploaded file
#             # )
#             MalwarePrediction.objects.create(
#                 process_name=process_name,
#                 process_class=process_class,
#                 probability_of_malware=probability,
#             )
#             records_saved += 1

#         return Response({
#             "message": f"{records_saved} records saved successfully!",
#         }, status=status.HTTP_201_CREATED)


# class MalwarePredictionAPIView(APIView):
#     parser_classes = [MultiPartParser]  # To handle file uploads
#     @staticmethod
#     def get_device_ids_by_user_id(user_id):
#         try:
#             # Get the UserProfile instance using the user ID
#             user_profile = UserProfile.objects.get(user__id=user_id)
#             print('user_profile', user_profile)

#             # Retrieve all Devices associated with this UserProfile
#             devices = Devices.objects.filter(used_by=user_profile)
#             print('devices', devices)

#             # Get the device IDs
#             device_ids = [device.id for device in devices]
#             return device_ids
#         except UserProfile.DoesNotExist:
#             return []

#     def post(self, request, *args, **kwargs):
#         if 'csv_file' not in request.FILES:
#             return Response({"error": "No file provided"}, status=status.HTTP_400_BAD_REQUEST)

#         csv_file = request.FILES.get('csv_file')
#         if not csv_file:
#             return Response({"error": "No CSV file provided"}, status=status.HTTP_400_BAD_REQUEST)

#         user_id = request.data.get('user_id')
#         if not user_id:
#             return Response({"error": "User ID is required"}, status=status.HTTP_400_BAD_REQUEST)

#         # Retrieve associated device IDs for the user
#         device_ids = self.get_device_ids_by_user_id(user_id)
#         print(device_ids)
#         if not device_ids:
#             return Response({"error": "No devices associated with the given user ID"}, status=status.HTTP_400_BAD_REQUEST)

#         # Try to get the recent device associated with the user
#         try:
#             device = Devices.objects.get(id=device_ids[-1])
#         except Devices.DoesNotExist:
#             return Response({"error": "Device not found for the given device ID"}, status=status.HTTP_400_BAD_REQUEST)

#         # Define the temp directory path for saving the file
#         temp_dir = os.path.join(settings.MEDIA_ROOT, 'malware_predictions', f'device_{device.id}')

#         # Create the 'temp' directory if it doesn't exist
#         if not os.path.exists(temp_dir):
#             os.makedirs(temp_dir)

#         # Save the file temporarily
#         temp_file_path = os.path.join(temp_dir, csv_file.name)
#         try:
#             with default_storage.open(temp_file_path, 'wb+') as destination:
#                 for chunk in csv_file.chunks():
#                     destination.write(chunk)
#         except Exception as e:
#             return Response({"error": "Failed to save the file", "details": str(e)}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)

#         # Read the CSV file with headers
#         try:
#             df = pd.read_csv(temp_file_path)
#             actual_columns = df.columns.tolist()
#         except Exception as e:
#             return Response({"error": "Could not read the CSV file", "details": str(e)}, status=status.HTTP_400_BAD_REQUEST)

#         # Define the expected column names
#         expected_columns = ['process_name', 'class', 'probability_of_malware']

#         # Validate and map columns
#         if actual_columns != expected_columns:
#             if len(actual_columns) == len(expected_columns):
#                 column_mapping = dict(zip(actual_columns, expected_columns))
#                 df.rename(columns=column_mapping, inplace=True)
#             else:
#                 return Response({"error": "CSV columns do not match expected format"}, status=status.HTTP_400_BAD_REQUEST)

#         # Ensure the user profile exists
#         try:
#             user_profile = UserProfile.objects.get(user__id=user_id)
#         except UserProfile.DoesNotExist:
#             return Response({"error": "User profile not found"}, status=status.HTTP_400_BAD_REQUEST)

#         # Save the predictions and create the related record
#         records_saved = 0
#         for _, row in df.iterrows():
#             process_name = row['process_name']
#             process_class = row['class']
#             probability = row['probability_of_malware']

#             try:
#                 # Save malware prediction
#                 MalwarePrediction.objects.create(
#                     process_name=process_name,
#                     process_class=process_class,
#                     probability_of_malware=probability,
#                 )

#                 # Save the device association
#                 MalwarePredictionsDevice.objects.create(
#                     device=device,
#                     user=user_profile,
#                     file_path=temp_file_path,
#                 )
#                 records_saved += 1
#             except Exception as e:
#                 return Response({"error": "Failed to save record", "details": str(e)}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)

#         return Response({
#             "message": f"{records_saved} records saved successfully!"
#         }, status=status.HTTP_201_CREATED)


class KNeighborsModelView(APIView):
    parser_classes = [MultiPartParser]  # To handle file uploads

    def post(self, request, *args, **kwargs):
        if 'csv_file' not in request.FILES:
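Note: a client-side sketch of the upload contract the new MalwarePredictionAPIView.post above expects — a multipart POST with a csv_file part and a user_id field. The endpoint path is hypothetical; only the field names come from the view:

    import requests

    with open('predictions.csv', 'rb') as fh:
        resp = requests.post(
            'http://localhost:8000/malware/predict/',   # hypothetical route
            files={'csv_file': fh},
            data={'user_id': 42},                       # illustrative user id
        )
    print(resp.status_code, resp.json())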
@@ -75,18 +449,14 @@ class MalwarePredictionAPIView(APIView):
                process_name=process_name,
                process_class=process_class,
                probability_of_malware=probability,
+                model_type=1
            )
            records_saved += 1

-        return Response({"message": f"{records_saved} records saved successfully!"}, status=status.HTTP_201_CREATED)
+        return Response({"message": " knn file saved successfully!"}, status=status.HTTP_201_CREATED)

    def get(self, request, *args, **kwargs):
        # Query all MalwarePrediction records from the database
-        predictions = MalwarePrediction.objects.all()
+        predictions = MalwarePrediction.objects.filter(model_type=1)

        if not predictions.exists():
            return Response({"error": "No data available to generate graph."}, status=status.HTTP_404_NOT_FOUND)
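Note: the reworked get() now renders only the KNN rows (model_type=1) as a PNG. A sketch of saving that image from a client, again with a hypothetical route:

    import requests

    resp = requests.get('http://localhost:8000/malware/knn/')   # hypothetical route
    if resp.status_code == 200 and resp.headers.get('Content-Type') == 'image/png':
        with open('knn_probability_by_class.png', 'wb') as out:
            out.write(resp.content)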
@@ -126,6 +496,313 @@ class MalwarePredictionAPIView(APIView):


class RandomForestModelView(APIView):
    parser_classes = [MultiPartParser]  # To handle file uploads

    def post(self, request, *args, **kwargs):
        if 'csv_file' not in request.FILES:
            return Response({"error": "No file provided"}, status=status.HTTP_400_BAD_REQUEST)

        csv_file = request.FILES['csv_file']

        try:
            # Define the temp directory path
            temp_dir = os.path.join(settings.MEDIA_ROOT, 'temp')

            # Create the 'temp' directory if it doesn't exist
            if not os.path.exists(temp_dir):
                os.makedirs(temp_dir)

            # Save the file temporarily
            temp_file_path = os.path.join(temp_dir, csv_file.name)
            with default_storage.open(temp_file_path, 'wb+') as destination:
                for chunk in csv_file.chunks():
                    destination.write(chunk)

            # Read the CSV file with headers
            df = pd.read_csv(temp_file_path)

            # Extract column names from the CSV
            actual_columns = df.columns.tolist()

        except Exception as e:
            return Response({"error": "Could not read the CSV file", "details": str(e)}, status=status.HTTP_400_BAD_REQUEST)

        # Define the expected column names
        expected_columns = ['process_name', 'class', 'probability_of_malware']

        # Mapping logic
        if actual_columns != expected_columns:
            # Map actual column names to expected ones
            column_mapping = dict(zip(actual_columns, expected_columns))
            df.rename(columns=column_mapping, inplace=True)

        # Continue with processing the dataframe...
        records_saved = 0
        for _, row in df.iterrows():
            process_name = row['process_name']
            process_class = row['class']
            probability = row['probability_of_malware']

            # Save the row to the database
            MalwarePrediction.objects.create(
                process_name=process_name,
                process_class=process_class,
                probability_of_malware=probability,
                model_type=2
            )
            records_saved += 1

        return Response({"message": " RandomForest file saved successfully!"}, status=status.HTTP_201_CREATED)

    def get(self, request, *args, **kwargs):
        # Query all MalwarePrediction records from the database
        predictions = MalwarePrediction.objects.filter(model_type=2)

        if not predictions.exists():
            return Response({"error": "No data available to generate graph."}, status=status.HTTP_404_NOT_FOUND)

        # Create a DataFrame from the queryset
        data = {
            'process_name': [p.process_name for p in predictions],
            'class': [p.process_class for p in predictions],
            'probability_of_malware': [p.probability_of_malware for p in predictions]
        }
        df = pd.DataFrame(data)

        # Plot using seaborn or matplotlib
        plt.figure(figsize=(10, 6))

        # Create a barplot where the class is on the x-axis and the probability is on the y-axis
        sns.barplot(
            data=df,
            x='class',                    # Independent variable (x-axis)
            y='probability_of_malware',   # Dependent variable (y-axis)
            ci=None,                      # No confidence intervals
            palette='Set2'                # Use a color palette for different classes
        )

        plt.title('Malware Probability by Class')
        plt.xlabel('Class')
        plt.ylabel('Probability of Malware')
        plt.tight_layout()

        # Save the plot to a bytes buffer
        buf = io.BytesIO()
        plt.savefig(buf, format='png')
        buf.seek(0)

        # Return the image as a response
        return HttpResponse(buf, content_type='image/png')


class XGBModelView(APIView):
    parser_classes = [MultiPartParser]  # To handle file uploads

    def post(self, request, *args, **kwargs):
        if 'csv_file' not in request.FILES:
            return Response({"error": "No file provided"}, status=status.HTTP_400_BAD_REQUEST)

        csv_file = request.FILES['csv_file']

        try:
            # Define the temp directory path
            temp_dir = os.path.join(settings.MEDIA_ROOT, 'temp')

            # Create the 'temp' directory if it doesn't exist
            if not os.path.exists(temp_dir):
                os.makedirs(temp_dir)

            # Save the file temporarily
            temp_file_path = os.path.join(temp_dir, csv_file.name)
            with default_storage.open(temp_file_path, 'wb+') as destination:
                for chunk in csv_file.chunks():
                    destination.write(chunk)

            # Read the CSV file with headers
            df = pd.read_csv(temp_file_path)

            # Extract column names from the CSV
            actual_columns = df.columns.tolist()

        except Exception as e:
            return Response({"error": "Could not read the CSV file", "details": str(e)}, status=status.HTTP_400_BAD_REQUEST)

        # Define the expected column names
        expected_columns = ['process_name', 'class', 'probability_of_malware']

        # Mapping logic
        if actual_columns != expected_columns:
            # Map actual column names to expected ones
            column_mapping = dict(zip(actual_columns, expected_columns))
            df.rename(columns=column_mapping, inplace=True)

        # Continue with processing the dataframe...
        records_saved = 0
        for _, row in df.iterrows():
            process_name = row['process_name']
            process_class = row['class']
            probability = row['probability_of_malware']

            # Save the row to the database
            MalwarePrediction.objects.create(
                process_name=process_name,
                process_class=process_class,
                probability_of_malware=probability,
                model_type=3
            )
            records_saved += 1

        return Response({"message": " XGB file saved successfully!"}, status=status.HTTP_201_CREATED)

    def get(self, request, *args, **kwargs):
        # Query all MalwarePrediction records from the database
        predictions = MalwarePrediction.objects.filter(model_type=3)

        if not predictions.exists():
            return Response({"error": "No data available to generate graph."}, status=status.HTTP_404_NOT_FOUND)

        # Create a DataFrame from the queryset
        data = {
            'process_name': [p.process_name for p in predictions],
            'class': [p.process_class for p in predictions],
            'probability_of_malware': [p.probability_of_malware for p in predictions]
        }
        df = pd.DataFrame(data)

        # Plot using seaborn or matplotlib
        plt.figure(figsize=(10, 6))

        # Create a barplot where the class is on the x-axis and the probability is on the y-axis
        sns.barplot(
            data=df,
            x='class',                    # Independent variable (x-axis)
            y='probability_of_malware',   # Dependent variable (y-axis)
            ci=None,                      # No confidence intervals
            palette='Set2'                # Use a color palette for different classes
        )

        plt.title('Malware Probability by Class')
        plt.xlabel('Class')
        plt.ylabel('Probability of Malware')
        plt.tight_layout()

        # Save the plot to a bytes buffer
        buf = io.BytesIO()
        plt.savefig(buf, format='png')
        buf.seek(0)

        # Return the image as a response
        return HttpResponse(buf, content_type='image/png')


class SGDModelView(APIView):
    parser_classes = [MultiPartParser]  # To handle file uploads

    def post(self, request, *args, **kwargs):
        if 'csv_file' not in request.FILES:
            return Response({"error": "No file provided"}, status=status.HTTP_400_BAD_REQUEST)

        csv_file = request.FILES['csv_file']

        try:
            # Define the temp directory path
            temp_dir = os.path.join(settings.MEDIA_ROOT, 'temp')

            # Create the 'temp' directory if it doesn't exist
            if not os.path.exists(temp_dir):
                os.makedirs(temp_dir)

            # Save the file temporarily
            temp_file_path = os.path.join(temp_dir, csv_file.name)
            with default_storage.open(temp_file_path, 'wb+') as destination:
                for chunk in csv_file.chunks():
                    destination.write(chunk)

            # Read the CSV file with headers
            df = pd.read_csv(temp_file_path)

            # Extract column names from the CSV
            actual_columns = df.columns.tolist()

        except Exception as e:
            return Response({"error": "Could not read the CSV file", "details": str(e)}, status=status.HTTP_400_BAD_REQUEST)

        # Define the expected column names
        expected_columns = ['process_name', 'class', 'probability_of_malware']

        # Mapping logic
        if actual_columns != expected_columns:
            # Map actual column names to expected ones
            column_mapping = dict(zip(actual_columns, expected_columns))
            df.rename(columns=column_mapping, inplace=True)

        # Continue with processing the dataframe...
        records_saved = 0
        for _, row in df.iterrows():
            process_name = row['process_name']
            process_class = row['class']
            probability = row['probability_of_malware']

            # Save the row to the database
            MalwarePrediction.objects.create(
                process_name=process_name,
                process_class=process_class,
                probability_of_malware=probability,
                model_type=4
            )
            records_saved += 1

        return Response({"message": " SGD file saved successfully!"}, status=status.HTTP_201_CREATED)

    def get(self, request, *args, **kwargs):
        # Query all MalwarePrediction records from the database
        predictions = MalwarePrediction.objects.filter(model_type=4)

        if not predictions.exists():
            return Response({"error": "No data available to generate graph."}, status=status.HTTP_404_NOT_FOUND)

        # Create a DataFrame from the queryset
        data = {
            'process_name': [p.process_name for p in predictions],
            'class': [p.process_class for p in predictions],
            'probability_of_malware': [p.probability_of_malware for p in predictions]
        }
        df = pd.DataFrame(data)

        # Plot using seaborn or matplotlib
        plt.figure(figsize=(10, 6))

        # Create a barplot where the class is on the x-axis and the probability is on the y-axis
        sns.barplot(
            data=df,
            x='class',                    # Independent variable (x-axis)
            y='probability_of_malware',   # Dependent variable (y-axis)
            ci=None,                      # No confidence intervals
            palette='Set2'                # Use a color palette for different classes
        )

        plt.title('Malware Probability by Class')
        plt.xlabel('Class')
        plt.ylabel('Probability of Malware')
        plt.tight_layout()

        # Save the plot to a bytes buffer
        buf = io.BytesIO()
        plt.savefig(buf, format='png')
        buf.seek(0)

        # Return the image as a response
        return HttpResponse(buf, content_type='image/png')


class KNeighborsModelView(APIView):
    parser_classes = [MultiPartParser]  # To handle file uploads
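Note: taken together, the views in this commit appear to key MalwarePrediction.model_type as 1 = KNeighbors, 2 = RandomForest, 3 = XGB, 4 = SGD. A small sketch of splitting stored results per classifier:

    from malware.models import MalwarePrediction

    MODEL_TYPES = {1: 'KNeighbors', 2: 'RandomForest', 3: 'XGB', 4: 'SGD'}   # inferred from the views above

    counts = {
        name: MalwarePrediction.objects.filter(model_type=code).count()
        for code, name in MODEL_TYPES.items()
    }
    print(counts)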
@@ -13,6 +13,11 @@
</head>
<body>
    <div class="tabSection">
        {% if message %}
        <div class="alert alert-warning" role="alert">
            {{ message }}
        </div>
        {% endif %}
        <div class="tabMalwareTitle">
            <p class="tabSectionMalware tabSectionActiveMalware">Alerts and Alarms</p>
            <!-- <p class="tabSectionMalware ">Alerts and Alarms</p> -->
@@ -285,14 +290,25 @@
        </div>
        <div class="tabSectionDetails">
            <pre id="MalwareLogActivity">

                {% for line in logs %}
                <script>
                    setInterval(fetchLogs, 2000);
                </script>

                <p>{{line}}</p>

                {% endfor %}

            </pre>

        </div>
        <!-- {% if message %}
        <div class="alert alert-warning" role="alert">
            {{ message }}
        </div>
        {% endif %} -->

    </div>
</div>

@@ -303,7 +319,7 @@
    <script src="{% static 'malware/js/PredictedChart.js' %}"></script>
    <script type="application/json" id="class-frequency-data">{{ class_frequency|safe }}</script>

-    <script type="text/javascript">
+    <!-- <script type="text/javascript">
        google.charts.load("current", { packages: ['corechart'] });
        google.charts.setOnLoadCallback(drawChart);

@@ -356,7 +372,145 @@
            var chart = new google.visualization.ColumnChart(document.getElementById("barMalwareChart"));
            chart.draw(view, options);
        }
    </script> -->

    <!-- <script type="text/javascript">
        google.charts.load("current", { packages: ['corechart'] });
        google.charts.setOnLoadCallback(drawChart);

        function drawChart() {
            // Function to fetch and draw the chart
            function updateChart() {
                // Use the Django context variable to populate the frequencies
                var classData = document.getElementById('class-frequency-data').textContent
                classData = classData.replace(/'/g, '"');
                var classFrequency = JSON.parse(classData);

                var data = google.visualization.arrayToDataTable([
                    ["Element", "Density", { role: "style" }],
                    ["Ramnit", parseInt(classFrequency['Ramnit']), "#000080"],
                    ["Lollipop", parseInt(classFrequency['LolliPop']), "#0000ff"],
                    ["Kelihos_ver3", parseInt(classFrequency['Kelihos_ver3']), "#0081ff"],
                    ["Vundo", parseInt(classFrequency['Vundo']), "#17ffe2"],
                    ["Simda", parseInt(classFrequency['Simda']), "#7bff7b"],
                    ["Tracur", parseInt(classFrequency['Tracur']), "#e3ff15"],
                    ["Kelihos_ver1", parseInt(classFrequency['Kelihos_ver1']), "#ff9801"],
                    ["Obfuscator.ACY", parseInt(classFrequency['Obfuscator.ACY']), "#ff2200"],
                    ["Gatak", parseInt(classFrequency['Gatak']), "#810000"],
                ]);

                var view = new google.visualization.DataView(data);
                view.setColumns([0, 1, 2]);

                var options = {
                    title: "",
                    height: 430,
                    width: 900,
                    legend: { position: "none" },
                    backgroundColor: '#0c212b',
                    tooltip: { trigger: 'none' },
                    vAxis: {
                        title: 'Count',
                        viewWindow: {
                            min: 0,
                            max: 200
                        },
                        ticks: [0, 20, 40, 60, 80, 100, 120, 140, 160, 180, 200],
                    },
                    hAxis: {
                        title: 'Class',
                        slantedText: true, // Tilt the text to prevent collisions
                    },
                    legend: { position: 'none' },
                };

                var chart = new google.visualization.ColumnChart(document.getElementById("barMalwareChart"));
                chart.draw(view, options);
            }

            // Initial draw
            updateChart();

            // Set up real-time updates every 5 seconds
            setInterval(updateChart, 1000);
        }
    </script> -->

    <script type="text/javascript">
        google.charts.load("current", { packages: ['corechart'] });
        google.charts.setOnLoadCallback(drawChart);

        function drawChart() {
            // Function to fetch and draw the chart
            function updateChart() {
                // Use the Django context variable to populate the frequencies
                var classData = document.getElementById('class-frequency-data').textContent;
                classData = classData.replace(/'/g, '"');
                var classFrequency = JSON.parse(classData);

                // Check if classFrequency is empty or contains invalid data
                var isEmpty = Object.values(classFrequency).every(function(value) {
                    return value === 0 || value === null || value === undefined;
                });

                // If data is empty, show a placeholder or empty graph
                if (isEmpty) {
                    document.getElementById("barMalwareChart").innerHTML = "<h3>No data available to display</h3>";
                    return; // Stop further execution if no data
                }

                // Prepare the data for the chart
                var data = google.visualization.arrayToDataTable([
                    ["Element", "Density", { role: "style" }],
                    ["Ramnit", parseInt(classFrequency['Ramnit']), "#000080"],
                    ["Lollipop", parseInt(classFrequency['LolliPop']), "#0000ff"],
                    ["Kelihos_ver3", parseInt(classFrequency['Kelihos_ver3']), "#0081ff"],
                    ["Vundo", parseInt(classFrequency['Vundo']), "#17ffe2"],
                    ["Simda", parseInt(classFrequency['Simda']), "#7bff7b"],
                    ["Tracur", parseInt(classFrequency['Tracur']), "#e3ff15"],
                    ["Kelihos_ver1", parseInt(classFrequency['Kelihos_ver1']), "#ff9801"],
                    ["Obfuscator.ACY", parseInt(classFrequency['Obfuscator.ACY']), "#ff2200"],
                    ["Gatak", parseInt(classFrequency['Gatak']), "#810000"],
                ]);

                var view = new google.visualization.DataView(data);
                view.setColumns([0, 1, 2]);

                var options = {
                    title: "",
                    height: 430,
                    width: 900,
                    legend: { position: "none" },
                    backgroundColor: '#0c212b',
                    tooltip: { trigger: 'none' },
                    vAxis: {
                        title: 'Count',
                        viewWindow: {
                            min: 0,
                            max: 200
                        },
                        ticks: [0, 20, 40, 60, 80, 100, 120, 140, 160, 180, 200],
                    },
                    hAxis: {
                        title: 'Class',
                        slantedText: true, // Tilt the text to prevent collisions
                    },
                    legend: { position: 'none' },
                };

                var chart = new google.visualization.ColumnChart(document.getElementById("barMalwareChart"));
                chart.draw(view, options);
            }

            // Initial draw
            updateChart();

            // Set up real-time updates every 5 seconds
            setInterval(updateChart, 1000);
        }
    </script>

    <script type="application/json" id="class-average-data">{{ average|safe }}</script>
    <script>
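Note: a hedged sketch of the view context the chart script above consumes — class_frequency rendered into the class-frequency-data tag as a dict keyed by the nine family names (the single-quote-to-double-quote replace in the script exists because a plain Python dict repr is emitted). The view and template names are illustrative; only the keys and the element id come from the page:

    from django.shortcuts import render
    from malware.models import MalwarePrediction

    FAMILIES = ['Ramnit', 'LolliPop', 'Kelihos_ver3', 'Vundo', 'Simda',
                'Tracur', 'Kelihos_ver1', 'Obfuscator.ACY', 'Gatak']

    def malware_dashboard(request):                      # hypothetical view
        class_frequency = {
            name: MalwarePrediction.objects.filter(process_class=name).count()
            for name in FAMILIES
        }
        return render(request, 'malware/malware.html',   # hypothetical template path
                      {'class_frequency': class_frequency})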
Binary file not shown.
@@ -87,11 +87,11 @@ WSGI_APPLICATION = 'x_sys.wsgi.application'
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql',
-        'NAME': 'webdefender',
+        'NAME': 'xsysdb',
-        'USER': 'defenderuser',
+        'USER': 'tech4biz',
        'PASSWORD': 'Admin@123',
-        'HOST':'localhost',
+        'HOST': 'localhost',
-        'PORT': '5432',
+        'PORT': '5433',

    }
}
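Note: a quick way to confirm the updated connection settings (new database name, role and port) from `python manage.py shell`; assumes PostgreSQL is listening on 5433 and the xsysdb database and tech4biz role already exist:

    from django.db import connection

    with connection.cursor() as cur:
        cur.execute("SELECT current_database(), current_user, inet_server_port()")
        print(cur.fetchone())   # expected: ('xsysdb', 'tech4biz', 5433)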