mirror of
https://github.com/suitenumerique/django-lasuite
synced 2026-04-25 17:15:14 +02:00
We want to log the file hash when a submission fails, in order to aggregate these logs and send them to JCOP.
946 lines
33 KiB
Python
946 lines
33 KiB
Python
"""Test JCop backend."""
|
|
|
|
from unittest import mock
|
|
|
|
import pytest
|
|
import requests
|
|
import responses
|
|
from django.core.files.base import ContentFile
|
|
from django.core.files.storage import default_storage
|
|
|
|
from lasuite.malware_detection import exceptions
|
|
from lasuite.malware_detection.backends import jcop
|
|
from lasuite.malware_detection.enums import ReportStatus
|
|
from lasuite.malware_detection.models import MalwareDetection, MalwareDetectionStatus
|
|
from lasuite.malware_detection.tasks.jcop import analyse_file_async, trigger_new_analysis
|
|
from tests import factories
|
|
|
|
# Module-level mock used as the malware-detection callback target; the backend
# resolves it by its dotted path, so every test asserts on this shared instance.
jcop_callback = mock.MagicMock()

# Every test in this module needs database access.
pytestmark = pytest.mark.django_db
|
|
|
|
|
|
@pytest.fixture(autouse=True)
def reset_jcop_callback():
    """Reset the module-level `jcop_callback` mock before each test."""
    jcop_callback.reset_mock()
|
|
|
|
|
|
@pytest.fixture
def jcop_generate_file_path():
    """Create a file in the default storage and yield ``(path, hash)``.

    The yielded hash is the SHA-256 digest of the file content ``"test"``.
    The file is deleted from storage on teardown.
    """
    file_path = "file.txt"
    content_file = ContentFile("test")
    default_storage.save(file_path, content_file)
    yield file_path, "9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08"
    default_storage.delete(file_path)
|
|
|
|
|
|
@pytest.fixture
def jcop_backend_parameters():
    """Return the keyword parameters used to configure the JCOP backend under test."""
    return {
        "base_url": "https://malware_detection.tld/api/v1",
        "api_key": "xxxx",
        "callback_path": "tests.malware_detection.backends.test_jcop_backend.jcop_callback",
    }
|
|
|
|
|
|
@pytest.fixture
def jcop_backend(jcop_backend_parameters):
    """Create a JCOP backend instance with test configuration.

    Builds the backend from the `jcop_backend_parameters` fixture instead of
    duplicating the same literals, so the two fixtures cannot drift apart.
    """
    return jcop.JCOPBackend(**jcop_backend_parameters)
|
|
|
|
|
|
def test_jcop_backend_max_processing_files_negative():
    """Test when the max processing files is negative, the backend raises a ValueError."""
    backend_kwargs = {
        "base_url": "https://malware_detection.tld/api/v1",
        "api_key": "xxxx",
        "callback_path": "tests.malware_detection.backends.test_jcop_backend.jcop_callback",
        "max_processing_files": -1,
    }
    with pytest.raises(ValueError, match="max_processing_files must be greater than or equal to 0"):
        jcop.JCOPBackend(**backend_kwargs)
|
|
|
|
|
|
def test_jcop_backend_file_exists(jcop_generate_file_path, jcop_backend):
    """Test call the analyse_file_async when file exists."""
    file_path, _ = jcop_generate_file_path

    assert not MalwareDetection.objects.filter(path=file_path).exists()

    with mock.patch.object(analyse_file_async, "delay") as delay_mock:
        jcop_backend.analyse_file(file_path)
        delay_mock.assert_called_once_with("file.txt")

    # A detection record has been created for the analysed file.
    assert MalwareDetection.objects.filter(
        backend="lasuite.malware_detection.backends.jcop.JCOPBackend", path=file_path
    ).exists()
|
|
|
|
|
|
def test_jcop_backend_analyse_file_max_processing_reached(jcop_generate_file_path):
    """When the max processing files is reached, the file is not analysed."""
    # Saturate the backend: exactly ``max_processing_files`` records already PROCESSING.
    factories.MalwareDetectionFactory.create_batch(10, status=MalwareDetectionStatus.PROCESSING)

    backend = jcop.JCOPBackend(
        base_url="https://malware_detection.tld/api/v1",
        api_key="xxxx",
        callback_path="tests.malware_detection.backends.test_jcop_backend.jcop_callback",
        max_processing_files=10,
    )

    with (
        mock.patch.object(analyse_file_async, "delay") as analyse_file_async_mock,
    ):
        file_path, _ = jcop_generate_file_path
        assert not MalwareDetection.objects.filter(path=file_path).exists()
        backend.analyse_file(file_path, foo="bar")
        # The async task must not be scheduled while the limit is reached.
        analyse_file_async_mock.assert_not_called()

    # The file is still recorded, but left PENDING for a later run.
    assert MalwareDetection.objects.filter(
        backend="lasuite.malware_detection.backends.jcop.JCOPBackend",
        path=file_path,
        status=MalwareDetectionStatus.PENDING,
    ).exists()
    assert MalwareDetection.objects.filter(status=MalwareDetectionStatus.PROCESSING).count() == 10
|
|
|
|
|
|
def test_jcop_backend_file_exists_called_with_kwargs(jcop_generate_file_path, jcop_backend):
    """Test call the analyse_file_async with kwargs when file exists."""
    file_path, _ = jcop_generate_file_path

    with mock.patch.object(analyse_file_async, "delay") as delay_mock:
        jcop_backend.analyse_file(file_path, foo="bar")

    # Extra keyword arguments are forwarded to the async task.
    delay_mock.assert_called_once_with("file.txt", foo="bar")
|
|
|
|
|
|
def test_jcop_backend_file_does_not_exists(jcop_backend):
    """Test call the backend file not existing must raise an exception."""
    missing_path = "file.txt"
    with pytest.raises(FileNotFoundError):
        jcop_backend.analyse_file(missing_path)
|
|
|
|
|
|
@responses.activate
@pytest.mark.parametrize("used_kwargs", [{}, {"foo": "bar"}])
@pytest.mark.parametrize("with_file_hash", [True, False])
def test_jcop_backend_analyse_file_async_done_no_malware(
    jcop_generate_file_path, jcop_backend, used_kwargs, with_file_hash
):
    """Test check_analysis with a safe file: no retry, SAFE callback."""
    file_path, file_hash = jcop_generate_file_path

    # Mock the results endpoint: analysis done, file is clean.
    responses.add(
        responses.GET,
        f"https://malware_detection.tld/api/v1/results/{file_hash}",
        json={"done": True, "is_malware": False},
        headers={
            "X-Auth-Token": "xxxx",
            "Accept": "application/json",
        },
        status=200,
    )

    # file_hash=None exercises the branch where the backend derives the hash
    # itself (the mocked URL still matches — presumably recomputed from the
    # stored file; TODO confirm against the backend implementation).
    should_retry = jcop_backend.check_analysis(
        file_path, file_hash=file_hash if with_file_hash else None, **used_kwargs
    )
    assert should_retry is False
    jcop_callback.assert_called_once_with("file.txt", ReportStatus.SAFE, error_info={}, **used_kwargs)
|
|
|
|
|
|
@responses.activate
@pytest.mark.parametrize("used_kwargs", [{}, {"foo": "bar"}])
@pytest.mark.parametrize("with_file_hash", [True, False])
def test_jcop_backend_analyse_file_async_done_no_malware_should_delete_detection_record(
    jcop_generate_file_path, jcop_backend, used_kwargs, with_file_hash
):
    """Test check_analysis with a safe file deletes the detection record."""
    file_path, file_hash = jcop_generate_file_path

    factories.MalwareDetectionFactory(path=file_path, status=MalwareDetectionStatus.PROCESSING)

    # Mock the results endpoint
    responses.add(
        responses.GET,
        f"https://malware_detection.tld/api/v1/results/{file_hash}",
        json={"done": True, "is_malware": False},
        headers={
            "X-Auth-Token": "xxxx",
            "Accept": "application/json",
        },
        status=200,
    )

    should_retry = jcop_backend.check_analysis(
        file_path, file_hash=file_hash if with_file_hash else None, **used_kwargs
    )
    assert should_retry is False
    jcop_callback.assert_called_once_with("file.txt", ReportStatus.SAFE, error_info={}, **used_kwargs)
    # The PROCESSING record is removed once the file is known to be safe.
    assert not MalwareDetection.objects.filter(path=file_path).exists()
|
|
|
|
|
|
@responses.activate
@pytest.mark.parametrize("used_kwargs", [{}, {"foo": "bar"}])
@pytest.mark.parametrize("with_file_hash", [True, False])
def test_jcop_backend_analyse_file_async_done_no_malware_should_trigger_next_analysis(
    jcop_generate_file_path, jcop_backend, used_kwargs, with_file_hash
):
    """A safe result deletes the record and schedules the next pending analysis."""
    file_path, file_hash = jcop_generate_file_path

    factories.MalwareDetectionFactory(path=file_path, status=MalwareDetectionStatus.PROCESSING)
    next_analysis = factories.MalwareDetectionFactory(status=MalwareDetectionStatus.PENDING)

    # Mock the results endpoint
    responses.add(
        responses.GET,
        f"https://malware_detection.tld/api/v1/results/{file_hash}",
        json={"done": True, "is_malware": False},
        headers={
            "X-Auth-Token": "xxxx",
            "Accept": "application/json",
        },
        status=200,
    )
    with mock.patch.object(analyse_file_async, "delay") as mock_apply_async:
        should_retry = jcop_backend.check_analysis(
            file_path, file_hash=file_hash if with_file_hash else None, **used_kwargs
        )
        # Freeing a processing slot launches the next pending detection.
        mock_apply_async.assert_called_once_with(next_analysis.path)

    assert should_retry is False
    jcop_callback.assert_called_once_with("file.txt", ReportStatus.SAFE, error_info={}, **used_kwargs)
    assert not MalwareDetection.objects.filter(path=file_path).exists()
    next_analysis.refresh_from_db()
    assert next_analysis.status == MalwareDetectionStatus.PROCESSING
|
|
|
|
|
|
@responses.activate
@pytest.mark.parametrize("used_kwargs", [{}, {"foo": "bar"}])
@pytest.mark.parametrize("with_file_hash", [True, False])
def test_jcop_backend_analyse_file_async_done_is_malware(
    jcop_generate_file_path, jcop_backend, used_kwargs, with_file_hash
):
    """Test check_analysis with an unsafe file."""
    file_path, file_hash = jcop_generate_file_path

    # Mock the results endpoint: analysis done, file flagged as malware.
    responses.add(
        responses.GET,
        f"https://malware_detection.tld/api/v1/results/{file_hash}",
        json={"done": True, "is_malware": True},
        headers={
            "X-Auth-Token": "xxxx",
            "Accept": "application/json",
        },
        status=200,
    )

    should_retry = jcop_backend.check_analysis(
        file_path, file_hash=file_hash if with_file_hash else None, **used_kwargs
    )
    assert should_retry is False
    # Malware is reported with the fixed "malware detected" / 5000 error info.
    jcop_callback.assert_called_once_with(
        file_path,
        ReportStatus.UNSAFE,
        error_info={
            "error": "malware detected",
            "error_code": 5000,
        },
        **used_kwargs,
    )
|
|
|
|
|
|
@responses.activate
@pytest.mark.parametrize("used_kwargs", [{}, {"foo": "bar"}])
@pytest.mark.parametrize("with_file_hash", [True, False])
@pytest.mark.parametrize("is_malware", [True, False])
def test_jcop_backend_analyse_file_async_done_with_error(  # noqa: PLR0913
    jcop_generate_file_path, jcop_backend, used_kwargs, with_file_hash, is_malware
):
    """Test check_analysis with errors in the response."""
    file_path, file_hash = jcop_generate_file_path

    # Mock the results endpoint: done, but carrying an error payload.
    responses.add(
        responses.GET,
        f"https://malware_detection.tld/api/v1/results/{file_hash}",
        json={"done": True, "is_malware": is_malware, "error": "error message", "error_code": 4001},
        headers={
            "X-Auth-Token": "xxxx",
            "Accept": "application/json",
        },
        status=200,
    )

    should_retry = jcop_backend.check_analysis(
        file_path, file_hash=file_hash if with_file_hash else None, **used_kwargs
    )
    assert should_retry is False
    # The malware flag still wins over the error for the report status.
    jcop_callback.assert_called_once_with(
        "file.txt",
        ReportStatus.UNSAFE if is_malware else ReportStatus.UNKNOWN,
        error_info={
            "error": "error message",
            "error_code": 4001,
        },
        **used_kwargs,
    )
|
|
|
|
|
|
@responses.activate
@pytest.mark.parametrize("used_kwargs", [{}, {"foo": "bar"}])
@pytest.mark.parametrize("with_file_hash", [True, False])
@pytest.mark.parametrize("is_malware", [True, False])
def test_jcop_backend_analyse_file_async_done_with_error_delete_detection_record(  # noqa: PLR0913
    jcop_generate_file_path, jcop_backend, used_kwargs, with_file_hash, is_malware
):
    """Errors in a done response mark the detection record as FAILED."""
    # NOTE(review): despite the test name, the record is kept and marked
    # FAILED, not deleted — see the assertions at the end.
    file_path, file_hash = jcop_generate_file_path
    malware_detection = factories.MalwareDetectionFactory(path=file_path, status=MalwareDetectionStatus.PROCESSING)

    # Mock the results endpoint
    responses.add(
        responses.GET,
        f"https://malware_detection.tld/api/v1/results/{file_hash}",
        json={"done": True, "is_malware": is_malware, "error": "error message", "error_code": 4001},
        headers={
            "X-Auth-Token": "xxxx",
            "Accept": "application/json",
        },
        status=200,
    )

    should_retry = jcop_backend.check_analysis(
        file_path, file_hash=file_hash if with_file_hash else None, **used_kwargs
    )
    assert should_retry is False
    jcop_callback.assert_called_once_with(
        "file.txt",
        ReportStatus.UNSAFE if is_malware else ReportStatus.UNKNOWN,
        error_info={
            "error": "error message",
            "error_code": 4001,
        },
        **used_kwargs,
    )
    # The record keeps the error details for later inspection.
    malware_detection.refresh_from_db()
    assert malware_detection.status == MalwareDetectionStatus.FAILED
    assert malware_detection.error_code == 4001
    assert malware_detection.error_msg == "error message"
|
|
|
|
|
|
@responses.activate
@pytest.mark.parametrize("used_kwargs", [{}, {"foo": "bar"}])
@pytest.mark.parametrize("with_file_hash", [True, False])
@pytest.mark.parametrize("is_malware", [True, False])
def test_jcop_backend_analyse_file_async_done_with_error_delete_detection_record_trigger_next(  # noqa: PLR0913
    jcop_generate_file_path, jcop_backend, used_kwargs, with_file_hash, is_malware
):
    """Errors mark the record FAILED and the next pending analysis is launched."""
    file_path, file_hash = jcop_generate_file_path
    malware_detection = factories.MalwareDetectionFactory(path=file_path, status=MalwareDetectionStatus.PROCESSING)
    next_record = factories.MalwareDetectionFactory(status=MalwareDetectionStatus.PENDING)

    # Mock the results endpoint
    responses.add(
        responses.GET,
        f"https://malware_detection.tld/api/v1/results/{file_hash}",
        json={"done": True, "is_malware": is_malware, "error": "error message", "error_code": 4001},
        headers={
            "X-Auth-Token": "xxxx",
            "Accept": "application/json",
        },
        status=200,
    )

    with mock.patch.object(analyse_file_async, "delay") as mock_apply_async:
        should_retry = jcop_backend.check_analysis(
            file_path, file_hash=file_hash if with_file_hash else None, **used_kwargs
        )
        # Freeing a processing slot launches the next pending detection.
        mock_apply_async.assert_called_once_with(next_record.path)

    assert should_retry is False
    jcop_callback.assert_called_once_with(
        "file.txt",
        ReportStatus.UNSAFE if is_malware else ReportStatus.UNKNOWN,
        error_info={
            "error": "error message",
            "error_code": 4001,
        },
        **used_kwargs,
    )
    malware_detection.refresh_from_db()
    assert malware_detection.status == MalwareDetectionStatus.FAILED
    assert malware_detection.error_code == 4001
    assert malware_detection.error_msg == "error message"
    next_record.refresh_from_db()
    assert next_record.status == MalwareDetectionStatus.PROCESSING
|
|
|
|
|
|
@responses.activate
@pytest.mark.parametrize("used_kwargs", [{}, {"foo": "bar"}])
@pytest.mark.parametrize("with_file_hash", [True, False])
def test_jcop_backend_analyse_file_async_not_done(jcop_generate_file_path, jcop_backend, used_kwargs, with_file_hash):
    """Test check_analysis with a not done analysis."""
    file_path, file_hash = jcop_generate_file_path

    # Mock the results endpoint: analysis still running.
    responses.add(
        responses.GET,
        f"https://malware_detection.tld/api/v1/results/{file_hash}",
        json={"done": False},
        headers={
            "X-Auth-Token": "xxxx",
            "Accept": "application/json",
        },
        status=200,
    )

    should_retry = jcop_backend.check_analysis(
        file_path, file_hash=file_hash if with_file_hash else None, **used_kwargs
    )
    # Caller should retry later; no report is emitted yet.
    assert should_retry is True
    jcop_callback.assert_not_called()
|
|
|
|
|
|
@responses.activate
@pytest.mark.parametrize("used_kwargs", [{}, {"foo": "bar"}])
@pytest.mark.parametrize("with_file_hash", [True, False])
def test_jcop_backend_analyse_file_async_unauthorized_status_code(
    jcop_generate_file_path, jcop_backend, used_kwargs, with_file_hash
):
    """Test check_analysis api return an unauthorized status code."""
    file_path, file_hash = jcop_generate_file_path

    # Mock the results endpoint: 401 Unauthorized.
    responses.add(
        responses.GET,
        f"https://malware_detection.tld/api/v1/results/{file_hash}",
        headers={
            "X-Auth-Token": "xxxx",
            "Accept": "application/json",
        },
        status=401,
    )
    with pytest.raises(exceptions.MalwareDetectionInvalidAuthenticationError):
        jcop_backend.check_analysis(
            file_path,
            file_hash=file_hash if with_file_hash else None,
            **used_kwargs,
        )

    # The callback is notified before the exception propagates.
    jcop_callback.assert_called_once_with(
        file_path,
        ReportStatus.UNKNOWN,
        error_info={
            "error": "Invalid API key",
            "error_code": 401,
        },
        **used_kwargs,
    )
|
|
|
|
|
|
@responses.activate
@pytest.mark.parametrize("used_kwargs", [{}, {"foo": "bar"}])
@pytest.mark.parametrize("with_file_hash", [True, False])
def test_jcop_backend_analyse_file_async_no_treatment(
    jcop_generate_file_path, jcop_backend, used_kwargs, with_file_hash
):
    """Test when there is no suitable case for check_analysis method."""
    file_path, file_hash = jcop_generate_file_path

    # Mock the results endpoint: "done" but no is_malware verdict at all.
    responses.add(
        responses.GET,
        f"https://malware_detection.tld/api/v1/results/{file_hash}",
        json={"done": True},
        headers={
            "X-Auth-Token": "xxxx",
            "Accept": "application/json",
        },
        status=200,
    )

    should_retry = jcop_backend.check_analysis(
        file_path, file_hash=file_hash if with_file_hash else None, **used_kwargs
    )
    assert should_retry is False
    jcop_callback.assert_called_once_with(
        file_path,
        ReportStatus.UNKNOWN,
        error_info={
            "error": "Unknown treatment",
            "error_code": 200,
        },
        **used_kwargs,
    )
|
|
|
|
|
|
@responses.activate
@pytest.mark.parametrize("used_kwargs", [{}, {"foo": "bar"}])
@pytest.mark.parametrize("with_file_hash", [True, False])
def test_jcop_backend_analyse_file_async_request_error(
    jcop_generate_file_path, jcop_backend, used_kwargs, with_file_hash
):
    """Test with a request exception should raise a Request exception."""
    file_path, file_hash = jcop_generate_file_path

    # Mock the results endpoint: the request itself fails (network error).
    responses.add(
        responses.GET,
        f"https://malware_detection.tld/api/v1/results/{file_hash}",
        body=requests.exceptions.RequestException(),
        headers={
            "X-Auth-Token": "xxxx",
            "Accept": "application/json",
        },
        status=200,
    )
    with pytest.raises(requests.exceptions.RequestException):
        jcop_backend.check_analysis(file_path, file_hash=file_hash if with_file_hash else None, **used_kwargs)

    # Transport errors bubble up without reporting any verdict.
    jcop_callback.assert_not_called()
|
|
|
|
|
|
@responses.activate
@pytest.mark.parametrize("used_kwargs", [{}, {"foo": "bar"}])
def test_jcop_backend_analyse_file_async_no_existing_result(jcop_generate_file_path, jcop_backend, used_kwargs):
    """Test when no result found should start the trigger_new_analysis task."""
    file_path, file_hash = jcop_generate_file_path

    # Mock the results endpoint: 404, no analysis known for this hash.
    responses.add(
        responses.GET,
        f"https://malware_detection.tld/api/v1/results/{file_hash}",
        headers={
            "X-Auth-Token": "xxxx",
            "Accept": "application/json",
        },
        status=404,
    )
    with mock.patch.object(trigger_new_analysis, "delay") as mock_trigger_new_analysis:
        should_retry = jcop_backend.check_analysis(file_path, file_hash=file_hash, **used_kwargs)
        assert should_retry is False

    # A fresh submission task is scheduled, forwarding the extra kwargs.
    mock_trigger_new_analysis.assert_called_once_with(
        file_path,
        file_hash,
        **used_kwargs,
    )
|
|
|
|
|
|
@responses.activate
@pytest.mark.parametrize("used_kwargs", [{}, {"foo": "bar"}])
def test_jcop_backend_trigger_new_analysis_success(jcop_generate_file_path, jcop_backend, used_kwargs):
    """Test successful submission of a file for analysis."""
    file_path, file_hash = jcop_generate_file_path

    # Mock the submit endpoint
    responses.add(
        responses.POST,
        "https://malware_detection.tld/api/v1/submit",
        json={"id": file_hash},
        headers={
            "X-Auth-Token": "xxxx",
            "Accept": "application/json",
        },
        status=200,
    )

    with mock.patch.object(analyse_file_async, "apply_async") as mock_apply_async:
        jcop_backend.trigger_new_analysis(file_path, file_hash, **used_kwargs)
        # The result poll is rescheduled with a 5-second countdown.
        mock_apply_async.assert_called_once_with(
            countdown=5,
            args=(file_path,),
            kwargs={
                "file_hash": file_hash,
                **used_kwargs,
            },
        )
|
|
|
|
|
|
@responses.activate
@pytest.mark.parametrize("used_kwargs", [{}, {"foo": "bar"}])
def test_jcop_backend_trigger_new_analysis_unauthorized(jcop_generate_file_path, jcop_backend, used_kwargs):
    """Test submission with invalid API key."""
    file_path, file_hash = jcop_generate_file_path

    # Mock the submit endpoint: 401 Unauthorized.
    responses.add(
        responses.POST,
        "https://malware_detection.tld/api/v1/submit",
        headers={
            "X-Auth-Token": "xxxx",
            "Accept": "application/json",
        },
        status=401,
    )

    with pytest.raises(exceptions.MalwareDetectionInvalidAuthenticationError):
        jcop_backend.trigger_new_analysis(file_path, file_hash, **used_kwargs)

    # The callback is notified before the exception propagates.
    jcop_callback.assert_called_once_with(
        file_path,
        ReportStatus.UNKNOWN,
        error_info={
            "error": "Invalid API key",
            "error_code": 401,
        },
        **used_kwargs,
    )
|
|
|
|
|
|
@responses.activate
@pytest.mark.parametrize("used_kwargs", [{}, {"foo": "bar"}])
def test_jcop_backend_trigger_new_analysis_unauthorized_complete_flow(
    jcop_generate_file_path, jcop_backend, used_kwargs
):
    """An unauthorized submission marks the record FAILED and launches the next analysis."""
    file_path, file_hash = jcop_generate_file_path
    malware_detection = factories.MalwareDetectionFactory(path=file_path, status=MalwareDetectionStatus.PROCESSING)
    next_record = factories.MalwareDetectionFactory(status=MalwareDetectionStatus.PENDING)

    # Mock the submit endpoint
    responses.add(
        responses.POST,
        "https://malware_detection.tld/api/v1/submit",
        headers={
            "X-Auth-Token": "xxxx",
            "Accept": "application/json",
        },
        status=401,
    )

    with (
        mock.patch.object(analyse_file_async, "delay") as mock_apply_async,
        pytest.raises(exceptions.MalwareDetectionInvalidAuthenticationError),
    ):
        jcop_backend.trigger_new_analysis(file_path, file_hash, **used_kwargs)

    # Freeing a processing slot launches the next pending detection.
    mock_apply_async.assert_called_once_with(next_record.path)

    jcop_callback.assert_called_once_with(
        file_path,
        ReportStatus.UNKNOWN,
        error_info={
            "error": "Invalid API key",
            "error_code": 401,
        },
        **used_kwargs,
    )
    malware_detection.refresh_from_db()
    assert malware_detection.status == MalwareDetectionStatus.FAILED
    assert malware_detection.error_code == 401
    assert malware_detection.error_msg == "Invalid API key"
    next_record.refresh_from_db()
    assert next_record.status == MalwareDetectionStatus.PROCESSING
|
|
|
|
|
|
@responses.activate
@pytest.mark.parametrize("used_kwargs", [{}, {"foo": "bar"}])
def test_jcop_backend_trigger_new_analysis_timeout_max_retries(jcop_generate_file_path, jcop_backend, used_kwargs):
    """Test submission with timeout after max retries."""
    file_path, file_hash = jcop_generate_file_path

    # Mock the submit endpoint: 408 Request Timeout on every attempt.
    responses.add(
        responses.POST,
        "https://malware_detection.tld/api/v1/submit",
        headers={
            "X-Auth-Token": "xxxx",
            "Accept": "application/json",
        },
        status=408,
    )

    with pytest.raises(TimeoutError):
        jcop_backend.trigger_new_analysis(file_path, file_hash, **used_kwargs)

    # Timeouts raise without emitting any report.
    jcop_callback.assert_not_called()
|
|
|
|
|
|
@responses.activate
@pytest.mark.parametrize("used_kwargs", [{}, {"foo": "bar"}])
def test_jcop_backend_trigger_new_analysis_file_too_large(jcop_generate_file_path, jcop_backend, used_kwargs):
    """Test submission with file too large."""
    file_path, file_hash = jcop_generate_file_path

    # Mock the submit endpoint: 413 Payload Too Large.
    responses.add(
        responses.POST,
        "https://malware_detection.tld/api/v1/submit",
        headers={
            "X-Auth-Token": "xxxx",
            "Accept": "application/json",
        },
        status=413,
    )

    # No exception: an oversized file is reported as UNKNOWN, not retried.
    jcop_backend.trigger_new_analysis(file_path, file_hash, **used_kwargs)

    jcop_callback.assert_called_once_with(
        file_path,
        ReportStatus.UNKNOWN,
        error_info={
            "error": "File too large",
            "error_code": 413,
        },
        **used_kwargs,
    )
|
|
|
|
|
|
@responses.activate
@pytest.mark.parametrize("used_kwargs", [{}, {"foo": "bar"}])
def test_jcop_backend_trigger_new_analysis_file_too_large_complete_flow(
    jcop_generate_file_path, jcop_backend, used_kwargs
):
    """File-too-large marks the record FAILED and launches the next analysis."""
    file_path, file_hash = jcop_generate_file_path
    malware_detection = factories.MalwareDetectionFactory(path=file_path, status=MalwareDetectionStatus.PROCESSING)
    next_record = factories.MalwareDetectionFactory(status=MalwareDetectionStatus.PENDING)
    # Mock the submit endpoint
    responses.add(
        responses.POST,
        "https://malware_detection.tld/api/v1/submit",
        headers={
            "X-Auth-Token": "xxxx",
            "Accept": "application/json",
        },
        status=413,
    )

    with mock.patch.object(analyse_file_async, "delay") as mock_apply_async:
        jcop_backend.trigger_new_analysis(file_path, file_hash, **used_kwargs)
        # Freeing a processing slot launches the next pending detection.
        mock_apply_async.assert_called_once_with(next_record.path)

    jcop_callback.assert_called_once_with(
        file_path,
        ReportStatus.UNKNOWN,
        error_info={
            "error": "File too large",
            "error_code": 413,
        },
        **used_kwargs,
    )
    malware_detection.refresh_from_db()
    assert malware_detection.status == MalwareDetectionStatus.FAILED
    assert malware_detection.error_code == 413
    assert malware_detection.error_msg == "File too large"
    next_record.refresh_from_db()
    assert next_record.status == MalwareDetectionStatus.PROCESSING
|
|
|
|
|
|
@responses.activate
@pytest.mark.parametrize("used_kwargs", [{}, {"foo": "bar"}])
def test_jcop_backend_trigger_new_analysis_request_error(jcop_generate_file_path, jcop_backend, used_kwargs):
    """Test submission with request error."""
    file_path, file_hash = jcop_generate_file_path

    # Mock the submit endpoint: the request itself fails (network error).
    responses.add(
        responses.POST,
        "https://malware_detection.tld/api/v1/submit",
        body=requests.exceptions.RequestException(),
        headers={
            "X-Auth-Token": "xxxx",
            "Accept": "application/json",
        },
        status=200,
    )
    with pytest.raises(requests.exceptions.RequestException):
        jcop_backend.trigger_new_analysis(file_path, file_hash, **used_kwargs)

    # Transport errors bubble up without reporting any verdict.
    jcop_callback.assert_not_called()
|
|
|
|
|
|
@responses.activate
@pytest.mark.parametrize("used_kwargs", [{}, {"foo": "bar"}])
def test_jcop_backend_trigger_new_analysis_unknown_status(jcop_generate_file_path, jcop_backend, used_kwargs):
    """Test submission with unknown status code."""
    file_path, file_hash = jcop_generate_file_path

    # Mock the submit endpoint: unexpected 500 response.
    responses.add(
        responses.POST,
        "https://malware_detection.tld/api/v1/submit",
        headers={
            "X-Auth-Token": "xxxx",
            "Accept": "application/json",
        },
        status=500,
    )

    # No exception: an unhandled status code is reported as UNKNOWN.
    jcop_backend.trigger_new_analysis(file_path, file_hash, **used_kwargs)

    jcop_callback.assert_called_once_with(
        file_path,
        ReportStatus.UNKNOWN,
        error_info={
            "error": "Unknown treatment",
            "error_code": 500,
        },
        **used_kwargs,
    )
|
|
|
|
|
|
@responses.activate
@pytest.mark.parametrize("used_kwargs", [{}, {"foo": "bar"}])
def test_jcop_backend_trigger_new_analysis_unknown_status_complete_flow(
    jcop_generate_file_path, jcop_backend, used_kwargs
):
    """An unknown status marks the record FAILED and launches the next analysis."""
    file_path, file_hash = jcop_generate_file_path
    malware_detection = factories.MalwareDetectionFactory(path=file_path, status=MalwareDetectionStatus.PROCESSING)
    next_record = factories.MalwareDetectionFactory(status=MalwareDetectionStatus.PENDING)
    # Mock the submit endpoint
    responses.add(
        responses.POST,
        "https://malware_detection.tld/api/v1/submit",
        headers={
            "X-Auth-Token": "xxxx",
            "Accept": "application/json",
        },
        status=500,
    )

    with mock.patch.object(analyse_file_async, "delay") as mock_apply_async:
        jcop_backend.trigger_new_analysis(file_path, file_hash, **used_kwargs)
        # Freeing a processing slot launches the next pending detection.
        mock_apply_async.assert_called_once_with(next_record.path)

    jcop_callback.assert_called_once_with(
        file_path,
        ReportStatus.UNKNOWN,
        error_info={
            "error": "Unknown treatment",
            "error_code": 500,
        },
        **used_kwargs,
    )
    malware_detection.refresh_from_db()
    assert malware_detection.status == MalwareDetectionStatus.FAILED
    assert malware_detection.error_code == 500
    assert malware_detection.error_msg == "Unknown treatment"
    next_record.refresh_from_db()
    assert next_record.status == MalwareDetectionStatus.PROCESSING
|
|
|
|
|
|
def test_jcop_backend_launch_next_analysis(jcop_generate_file_path, jcop_backend):
    """Launch a new analysis when calling launch_next_analysis."""
    file_path, _ = jcop_generate_file_path
    factories.MalwareDetectionFactory(status=MalwareDetectionStatus.PENDING, path=file_path, parameters={"foo": "bar"})

    # Add several more pending detections
    factories.MalwareDetectionFactory.create_batch(10, status=MalwareDetectionStatus.PENDING)

    with mock.patch.object(analyse_file_async, "delay") as delay_mock:
        jcop_backend.launch_next_analysis()

    # Only the first pending record is launched, with its stored parameters.
    delay_mock.assert_called_once_with(file_path, foo="bar")
|
|
|
|
|
|
def test_jcop_backend_launch_next_analysis_no_pending_detection(jcop_backend):
    """When there is no pending detection, the method does nothing."""
    with mock.patch.object(analyse_file_async, "delay") as delay_mock:
        jcop_backend.launch_next_analysis()

    delay_mock.assert_not_called()
|
|
|
|
|
|
def test_jcop_backend_delete_existing_detection(jcop_backend):
    """Calling delete_detection on an existing one should delete it."""
    path = "file.txt"
    factories.MalwareDetectionFactory(path=path)

    jcop_backend.delete_detection(path)

    assert not MalwareDetection.objects.filter(path=path).exists()
|
|
|
|
|
|
def test_jcop_backend_delete_non_existing_detection(jcop_backend):
    """Calling delete_detection on a non-existing one should do nothing."""
    assert not MalwareDetection.objects.exists()

    # Must be a no-op rather than raising.
    jcop_backend.delete_detection("file.txt")

    assert not MalwareDetection.objects.exists()
|
|
|
|
|
|
def test_jcop_backend_reschedule_processing_task(jcop_generate_file_path, jcop_backend):
    """Reschedule the processing task for a malware detection record."""
    file_path, _ = jcop_generate_file_path
    detection = factories.MalwareDetectionFactory(
        path=file_path,
        status=MalwareDetectionStatus.PROCESSING,
        backend="lasuite.malware_detection.backends.jcop.JCOPBackend",
    )

    with mock.patch.object(analyse_file_async, "delay") as delay_mock:
        jcop_backend.reschedule_processing_task(detection)

    delay_mock.assert_called_once_with(file_path)
|
|
|
|
|
|
def test_jcop_backend_reschedule_processing_missing_file(jcop_backend):
    """Reschedule the processing task for a malware detection record with a missing file."""
    missing_path = "file.txt"
    detection = factories.MalwareDetectionFactory(
        path=missing_path,
        status=MalwareDetectionStatus.PROCESSING,
        backend="lasuite.malware_detection.backends.jcop.JCOPBackend",
    )

    with mock.patch.object(analyse_file_async, "delay") as delay_mock:
        jcop_backend.reschedule_processing_task(detection)

    delay_mock.assert_not_called()

    # The record pointing at a missing file has been removed.
    assert not MalwareDetection.objects.filter(path=missing_path).exists()
|
|
|
|
|
|
def test_jcop_backend_reschedule_processing_not_processing(jcop_backend):
    """Reschedule the processing task for a malware detection record with a not processing status."""
    pending_path = "file.txt"
    detection = factories.MalwareDetectionFactory(
        path=pending_path,
        status=MalwareDetectionStatus.PENDING,
        backend="lasuite.malware_detection.backends.jcop.JCOPBackend",
    )

    with mock.patch.object(analyse_file_async, "delay") as delay_mock:
        jcop_backend.reschedule_processing_task(detection)

    delay_mock.assert_not_called()

    # A non-PROCESSING record is left untouched.
    assert MalwareDetection.objects.filter(path=pending_path).exists()
|
|
|
|
|
|
def test_jcop_backend_reschedule_processing_not_jcop_backend(jcop_backend):
    """Reschedule the processing task for a malware detection record with a not JCOP backend."""
    other_backend_path = "file.txt"
    detection = factories.MalwareDetectionFactory(
        path=other_backend_path,
        status=MalwareDetectionStatus.PROCESSING,
        backend="lasuite.malware_detection.backends.dummy.DummyBackend",
    )

    with mock.patch.object(analyse_file_async, "delay") as delay_mock:
        jcop_backend.reschedule_processing_task(detection)

    delay_mock.assert_not_called()

    # Records belonging to another backend are left untouched.
    assert MalwareDetection.objects.filter(path=other_backend_path).exists()
|