# (c) cavaliba.com - tests / submit_pipeline + task_pipeline

from unittest.mock import patch

import app_home.cache as cache
import yaml
from app_data.loader import load_broker
from app_data.models import DataTask
from app_data.task_manager import abort_task
from app_data.tasks import submit_pipeline, task_pipeline
from django.core.cache import cache as cache_django
from django.test import TestCase
from tests import helper

AAA_ADMIN = {'perms': ['p_data_admin', 'p_pipeline_run']}


def add_noop_pipeline(keyname="test_pipeline"):
    """Register an enabled test pipeline whose only task is a no-op field update.

    The pipeline is loaded through load_broker with the p_pipeline_create
    permission, mirroring how a real operator would create it.
    """
    yaml_text = f'''
        - classname: _pipeline
          keyname: {keyname}
          displayname: {keyname}
          is_enabled: True
          content: |
              tasks:
              - field_noop: ['']
        '''
    entries = yaml.safe_load(yaml_text)
    load_broker(datalist=entries, aaa={'perms': ['p_pipeline_create']})


def add_pipeline_with_permission(keyname="restricted_pipeline", run_permission="p_pipeline_run"):
    """Register an enabled no-op pipeline gated by an explicit run_permission.

    Identical to add_noop_pipeline() except that the pipeline declares a
    run_permission, so only callers holding it may execute the pipeline.
    """
    yaml_text = f'''
        - classname: _pipeline
          keyname: {keyname}
          displayname: {keyname}
          is_enabled: True
          run_permission: {run_permission}
          content: |
              tasks:
              - field_noop: ['']
        '''
    entries = yaml.safe_load(yaml_text)
    load_broker(datalist=entries, aaa={'perms': ['p_pipeline_create']})


class SubmitPipelineTest(TestCase):
    """Tests for submit_pipeline(): handle/error return values, DataTask
    bookkeeping (owner, instance counts) and sync vs async final states."""

    fixtures = ["init"]

    def setUp(self):
        helper.add_admin_user(login="unittest")
        cache.clear()
        cache_django.clear()
        helper.add_schema(classname="test")
        helper.add_instance(classname="test", keyname="obj01")
        add_noop_pipeline()

    def test_submit_returns_handle(self):
        """A valid submission returns a non-None handle and no error."""
        handle, err = submit_pipeline("test_pipeline", ["test"], aaa=AAA_ADMIN, sync=True)
        self.assertIsNotNone(handle)
        self.assertIsNone(err)

    def test_submit_unknown_pipeline_returns_error(self):
        """An unknown pipeline name yields (None, 'pipeline not found')."""
        handle, err = submit_pipeline("no_such_pipeline", ["test"], aaa=AAA_ADMIN, sync=True)
        self.assertIsNone(handle)
        self.assertEqual(err, "pipeline not found")

    def test_submit_creates_datatask(self):
        """Submission persists a DataTask row owned by the API."""
        handle, err = submit_pipeline("test_pipeline", ["test"], aaa=AAA_ADMIN, sync=True)
        dt = DataTask.objects.get(handle=handle)
        self.assertIsNotNone(dt)
        self.assertEqual(dt.owner_type, "api")

    def test_submit_counts_total_instances(self):
        """params['total_instances'] reflects every instance of the schema."""
        helper.add_instance(classname="test", keyname="obj02")
        handle, err = submit_pipeline("test_pipeline", ["test"], aaa=AAA_ADMIN, sync=True)
        dt = DataTask.objects.get(handle=handle)
        self.assertEqual(dt.params.get("total_instances"), 2)

    def test_submit_counts_user_instances(self):
        """The pseudo-schema '_user' counts SireneUser rows as instances."""
        from app_user.models import SireneUser
        expected = SireneUser.objects.count()
        handle, err = submit_pipeline("test_pipeline", ["_user"], aaa=AAA_ADMIN, sync=True)
        dt = DataTask.objects.get(handle=handle)
        self.assertEqual(dt.params.get("total_instances"), expected)

    def test_submit_sync_task_is_done(self):
        """sync=True runs the pipeline inline, so the task ends in DONE."""
        handle, err = submit_pipeline("test_pipeline", ["test"], aaa=AAA_ADMIN, sync=True)
        dt = DataTask.objects.get(handle=handle)
        self.assertEqual(dt.state, "DONE")

    def test_submit_async_task_is_queued_or_done(self):
        """sync=False hands the task to Celery; with eager execution it may
        already be DONE, otherwise QUEUED/RUNNING are acceptable."""
        from django.test import override_settings
        with override_settings(CELERY_TASK_ALWAYS_EAGER=True):
            handle, err = submit_pipeline("test_pipeline", ["test"], aaa=AAA_ADMIN, sync=False)
        self.assertIsNotNone(handle)
        dt = DataTask.objects.get(handle=handle)
        self.assertIn(dt.state, ["DONE", "QUEUED", "RUNNING"])


class TaskPipelineOutputTest(TestCase):
    """Tests for the output payload written by task_pipeline: structure,
    per-schema counters, dryrun flag, and FAILED state on a bad pipeline."""

    fixtures = ["init"]

    def setUp(self):
        helper.add_admin_user(login="unittest")
        cache.clear()
        cache_django.clear()
        helper.add_schema(classname="test")
        add_noop_pipeline()

    def test_output_structure_on_done(self):
        """A completed task exposes all expected top-level output keys."""
        helper.add_instance(classname="test", keyname="obj01")
        handle, _ = submit_pipeline("test_pipeline", ["test"], aaa=AAA_ADMIN, sync=True)
        dt = DataTask.objects.get(handle=handle)
        output = dt.output
        self.assertIn("pipeline", output)
        self.assertIn("dryrun", output)
        self.assertIn("total_ok", output)
        self.assertIn("total_discarded", output)
        self.assertIn("total_errors", output)
        self.assertIn("results", output)

    def test_output_counts_ok(self):
        """Every processed instance is counted in total_ok; nothing is
        discarded or errored for the no-op pipeline."""
        helper.add_instance(classname="test", keyname="obj01")
        helper.add_instance(classname="test", keyname="obj02")
        handle, _ = submit_pipeline("test_pipeline", ["test"], aaa=AAA_ADMIN, sync=True)
        output = DataTask.objects.get(handle=handle).output
        self.assertEqual(output["total_ok"], 2)
        self.assertEqual(output["total_discarded"], 0)
        self.assertEqual(output["total_errors"], 0)

    def test_output_dryrun_no_save(self):
        """dryrun=True is reflected in the output and instances still count as ok."""
        helper.add_instance(classname="test", keyname="obj01", fields={"displayname": "original"})
        handle, _ = submit_pipeline("test_pipeline", ["test"], aaa=AAA_ADMIN, dryrun=True, sync=True)
        output = DataTask.objects.get(handle=handle).output
        self.assertTrue(output["dryrun"])
        self.assertEqual(output["total_ok"], 1)

    def test_output_multi_schema(self):
        """Instances from several schemas are aggregated; results carry the
        schema name of each processed batch."""
        helper.add_schema(classname="test2")
        helper.add_instance(classname="test", keyname="obj01")
        helper.add_instance(classname="test2", keyname="obj02")
        handle, _ = submit_pipeline("test_pipeline", ["test", "test2"], aaa=AAA_ADMIN, sync=True)
        output = DataTask.objects.get(handle=handle).output
        self.assertEqual(output["total_ok"], 2)
        schemas = [r["schema"] for r in output["results"]]
        self.assertIn("test", schemas)
        self.assertIn("test2", schemas)

    def test_output_empty_schema(self):
        """A schema with no instances completes with zeroed counters."""
        handle, _ = submit_pipeline("test_pipeline", ["test"], aaa=AAA_ADMIN, sync=True)
        output = DataTask.objects.get(handle=handle).output
        self.assertEqual(output["total_ok"], 0)
        self.assertEqual(output["total_discarded"], 0)

    def test_failed_state_on_missing_pipeline(self):
        """Calling task_pipeline directly with a nonexistent pipeline name
        marks the task FAILED and records an error in the output."""
        from app_data.task_manager import create_datatask
        dt = create_datatask("bad pipeline", params={"pipeline_name": "no_such", "schema_names": ["test"], "dryrun": False, "aaa": AAA_ADMIN, "total_instances": 0})
        task_pipeline(str(dt.handle), dt.params)
        dt.refresh_from_db()
        self.assertEqual(dt.state, "FAILED")
        self.assertIn("error", dt.output)


class TaskPipelinePermissionTest(TestCase):
    """Tests for run_permission enforcement on pipeline execution."""

    fixtures = ["init"]

    def setUp(self):
        helper.add_admin_user(login="unittest")
        cache.clear()
        cache_django.clear()
        helper.add_schema(classname="test")
        helper.add_instance(classname="test", keyname="obj01")
        add_pipeline_with_permission(run_permission="p_special_perm")

    def test_permission_denied_sets_failed(self):
        """Without the pipeline's run_permission the task fails with an
        explicit 'permission denied' error in its output."""
        from app_data.task_manager import create_datatask
        denied_aaa = {'perms': ['p_data_admin']}
        task_params = {
            "pipeline_name": "restricted_pipeline",
            "schema_names": ["test"],
            "dryrun": False,
            "aaa": denied_aaa,
            "total_instances": 1,
        }
        datatask = create_datatask("restricted", params=task_params)
        task_pipeline(str(datatask.handle), task_params)
        datatask.refresh_from_db()
        self.assertEqual(datatask.state, "FAILED")
        self.assertIn("permission denied", datatask.output.get("error", ""))

    def test_permission_granted_runs_ok(self):
        """Holding the declared run_permission lets the pipeline complete."""
        granted_aaa = {'perms': ['p_data_admin', 'p_special_perm']}
        handle, err = submit_pipeline("restricted_pipeline", ["test"], aaa=granted_aaa, sync=True)
        self.assertIsNotNone(handle)
        datatask = DataTask.objects.get(handle=handle)
        self.assertEqual(datatask.state, "DONE")


class TaskPipelineAbortTest(TestCase):
    """Tests for aborting a pipeline task, both before it starts and while
    it is iterating over instance batches."""

    fixtures = ["init"]

    def setUp(self):
        helper.add_admin_user(login="unittest")
        cache.clear()
        cache_django.clear()
        helper.add_schema(classname="test")
        add_noop_pipeline()

    def test_abort_before_run_skips_task(self):
        """A task aborted before execution never leaves the ABORTED state."""
        # 110 instances so the run would span more than one 100-item batch.
        for i in range(110):
            helper.add_instance(classname="test", keyname=f"obj{i:03d}")

        from app_data.task_manager import create_datatask
        task_params = {
            "pipeline_name": "test_pipeline",
            "schema_names": ["test"],
            "dryrun": True,
            "aaa": AAA_ADMIN,
            "total_instances": 110,
        }
        datatask = create_datatask("abort test", params=task_params)
        abort_task(str(datatask.handle))  # abort before task_pipeline runs
        task_pipeline(str(datatask.handle), task_params)
        datatask.refresh_from_db()
        # The initial state check must short-circuit the whole run.
        self.assertEqual(datatask.state, "ABORTED")

    def test_abort_mid_run_via_mock(self):
        """Patching is_aborted to report True makes a running task stop
        at its next abort check and finish in ABORTED state."""
        for i in range(110):
            helper.add_instance(classname="test", keyname=f"obj{i:03d}")

        poll_count = 0

        def fake_is_aborted(handle):
            # Signal abort from the very first poll onwards.
            nonlocal poll_count
            poll_count += 1
            return poll_count >= 1

        from app_data.task_manager import create_datatask
        task_params = {
            "pipeline_name": "test_pipeline",
            "schema_names": ["test"],
            "dryrun": True,
            "aaa": AAA_ADMIN,
            "total_instances": 110,
        }
        datatask = create_datatask("mid-abort test", params=task_params)
        with patch("app_data.tasks.is_aborted", side_effect=fake_is_aborted):
            task_pipeline(str(datatask.handle), task_params)
        datatask.refresh_from_db()
        self.assertEqual(datatask.state, "ABORTED")
