# (c) cavaliba.com

#  command / cavaliba.py

import json
import os
import sys
import time

import yaml
from app_data.data import Instance
from app_data.eav import eav_purge, eav_refresh

# load
from app_data.loader import load_broker, load_file_csv, load_file_json, load_file_yaml
from app_data.models import DataClass, DataInstance, DataTask
from app_data.pipeline import Pipeline
from app_data.schema import Schema
from app_data.tasks import submit_pipeline
from app_home.cavaliba import cavaliba_start

# conf
from app_home.configuration import conf_list, set_configuration, sync_configuration

#from app_data.exporter import listdict_class_export
from app_home.export import home_export_dict

# log
from app_home.log import log_get_count, log_list, purge, purge_all
from app_home.models import DashboardApp
from app_user.aaa import start_command
from app_user.group import group_listdict_format

# user / IAM models
from app_user.models import SireneGroup, SirenePermission, SireneUser
from app_user.permission import permission_all_keynames, permission_listdict_format
from app_user.role import role_listdict_format
from app_user.user import user_listdict_format
from django.conf import settings
from django.core.management.base import BaseCommand
from django.forms.models import model_to_dict

# export
# Schemas / special structures known to the export feature.
# NOTE(review): not referenced elsewhere in this file — presumably
# imported by other modules; confirm before removing.
SCHEMA_LIST = ["user","group","role","permission", "home",
               "sirene_message", "sirene_template", "sirene_public",
               "apikey", "dataview","enumerate","pipeline",
               "dataclass"
               ]

# YAML export
class MyYamlDumper(yaml.SafeDumper):
    """SafeDumper variant that emits a blank line between top-level items.

    Used by the conf/get export paths to keep multi-document YAML output
    readable.
    """

    def write_line_break(self, data=None):
        # Emit the normal line break first.
        super().write_line_break(data)
        # While still at the document's top level (fewer than two entries
        # on the indent stack), add an extra break as a visual separator.
        if len(self.indents) <= 1:
            super().write_line_break()






# from cavaliba cli mgmt command
def listdict_class_export(classes=None, keyname=None, first=0, last=9999999999, refs=None, rev=None):
    """Export data instances as a list of plain dicts.

    When *classes* is falsy, every schema known to ``Schema`` is exported.
    With *keyname*, only the matching instance per class is returned;
    otherwise instances in the [first, last] window are iterated.
    *refs* / *rev* are forwarded to ``get_dict_for_export``.
    """

    if not classes:
        classes = Schema.listall_names()

    exported = []

    for classname in classes:
        if keyname:
            # Single-instance lookup; silently skipped when not found.
            instance = Instance.from_keyname(classname=classname, keyname=keyname)
            if instance:
                exported.append(instance.get_dict_for_export(refs=refs, rev=rev))
            continue

        # Windowed iteration over all instances of this class.
        exported.extend(
            instance.get_dict_for_export(refs=refs, rev=rev)
            for instance in Instance.iterate_classname(classname=classname, first=first, last=last)
        )

    return exported









class Command(BaseCommand):

    help = 'manage cavaliba configuration'

    # https://docs.python.org/3/library/argparse.html#name-or-flags
    def add_arguments(self, parser):
        """Declare CLI arguments, grouped by the action they apply to.

        BUGFIX: the 'action' help string now matches the actions accepted
        by handle() — it previously advertised 'init' (which handle()
        rejects) and omitted start/eav/test.
        """

        parser.add_argument('--verbose', action='store_true', help='Verbose mode' )

        # keep this list in sync with valid_action in handle()
        parser.add_argument('action', type=str,  help="start|log|conf|load|schema|get|version|cache|eav|pipeline|test" )

        # log
        parser.add_argument('--log_purge_all', action='store_true', help='log: purge ALL log entries' )
        parser.add_argument('--log_purge', action='store_true', help='log: purge OLDER log entries' )

        # conf
        parser.add_argument('--conf_file', type=str, help="conf: load conf file")
        parser.add_argument('--conf_sync', action='store_true', help="conf: sync new default entries")

        # load
        parser.add_argument('--pipeline', type=str, help="load: pipeline to preprocess data")
        parser.add_argument('--pass', type=int, default=1, help="load: default 1 , increase number for relationships between objects")
        parser.add_argument('--dryrun', action='store_true', help='load: Dry run, no loading. Use verbose to see data' )
        parser.add_argument('--sync', action='store_true', help='pipeline: run synchronously in current process (default: async via Celery with polling)' )
        parser.add_argument('--async', action='store_true', help='pipeline: submit async task, poll for completion' )
        parser.add_argument('--progress', action='store_true', help='load: Display progress' )
        parser.add_argument('--force_action', type=str, help="load: override _action field in files")
        parser.add_argument('filenames', nargs='*', type=str)

        # schema / get
        parser.add_argument('--schema', type=str, help="schema, get: schema to query")
        parser.add_argument('--key', type=str, help="schema, get: specific instance/keyname to query")
        parser.add_argument('--refs', type=str, help="get: comma-separated list of types to inline as _refs (e.g. _user,_group,server or *)")
        parser.add_argument('--rev', type=int, help="get: number of revisions to include as _revision")

        # eav/cache
        # BUGFIX: help typo "Rrefresh" -> "Refresh"
        parser.add_argument('--cache_refresh', action='store_true', help='cache: Refresh EAV cache' )
        parser.add_argument('--cache_purge', action='store_true', help='cache: Purge EAV cache orphans' )

        # common / global
        parser.add_argument('--first', type=int, help="first item/line to process")
        parser.add_argument('--last', type=int, help="last item/line to process")
        parser.add_argument('--yaml', action='store_true', help='use YAML format (in/out)' )
        parser.add_argument('--json', action='store_true', help='use JSON format (in/out)' )
        parser.add_argument('--text', action='store_true', help='use TEXT format (in/out)' )
        parser.add_argument('--force', action='store_true', help='Force action (EAV, ...)' )



    def handle(self, *args, **options):
        """Entry point: validate the requested action and dispatch it to the
        matching sub-command handler.

        BUGFIX: corrected the user-visible "cache refesh" typo and the
        swapped SCHEMA/GET branch comments.
        """

        action = options.get("action", None)

        valid_action = [ "start", "log", "conf", "load", "schema", "get", "version", "cache", "eav", "pipeline", "test"]
        if action:
            if action not in valid_action:
                self.stderr.write("unknown action: " + action)
                self.stderr.write("valid action: " +  ' '.join(valid_action) )
                return

        # AAA bookkeeping for this CLI invocation
        start_command(command="cavaliba", action=action)

        verbose = options.get("verbose", False)

        # CACHE / EAV maintenance
        if action == "cache" or action == "eav":

            self.stdout.write("cache command started...\n")

            purge = options.get("cache_purge", False)
            refresh = options.get("cache_refresh", False)
            dryrun = options.get("dryrun")
            force = options.get("force", False)
            progress = options.get("progress", False)

            if dryrun:
                self.stderr.write("** dryrun **")
            if force:
                self.stderr.write("** force **")

            if purge:
                self.stdout.write("cache purge started...\n")
                eav_purge(verbose=verbose, dryrun=dryrun, progress=progress)
                self.stdout.write("cache purge done.\n")

            if refresh:
                # BUGFIX: message previously read "cache refesh started"
                self.stdout.write("cache refresh started...\n")
                eav_refresh(verbose=verbose, dryrun=dryrun, force=force, progress=progress)
                self.stdout.write("cache refresh done.\n")

            self.stdout.write("cache command done.\n")

        # START
        elif action == "start":
            cavaliba_start()

        # LOAD
        elif action == "load":
            self.cavaliba_load(options)

        # LOG
        elif action == "log":
            self.cavaliba_log(options)

        # CONF
        elif action == "conf":
            self.cavaliba_conf(options)

        # GET
        elif action == "get":
            self.cavaliba_get(options)

        # SCHEMA
        elif action == "schema":
            self.cavaliba_schema(options)

        # VERSION
        elif action == "version":
            self.stdout.write(f"{settings.CAVALIBA_VERSION}\n")

        # PIPELINE
        elif action == "pipeline":
            self.cavaliba_pipeline(options)

        # TEST — debug helper: dump known schema classnames
        elif action == "test":
            print("TEST")
            schemas = Schema.listall()
            for schema in schemas:
                print("SCHEMA options: ", schema.classname )


    # ----------------------------------------------------------------
    # pipeline
    # ----------------------------------------------------------------
    # python manage.py cavaliba pipeline --pipeline NAME --schema s1,s2,_user [--dryrun]

    def cavaliba_pipeline(self, options):
        """Run a named pipeline over one or more schemas.

        Submits the pipeline via submit_pipeline() (sync in-process, or
        async via Celery with a 2s polling loop), then prints a per-schema
        summary from the resulting DataTask output.
        """

        # --pipeline is mandatory
        pipeline_name = options.get("pipeline", None)
        if not pipeline_name:
            self.stderr.write("ERR - please provide a pipeline name with --pipeline")
            return

        # --schema is a mandatory comma-separated list
        schema_raw = options.get("schema", None)
        if not schema_raw:
            self.stderr.write("ERR - please provide at least one schema with --schema")
            return

        schema_names = [s.strip() for s in schema_raw.split(',') if s.strip()]
        if not schema_names:
            self.stderr.write("ERR - invalid --schema value")
            return

        dryrun = options.get("dryrun", False)
        verbose = options.get("verbose", False)
        sync = options.get("sync", False)

        # run with the full permission set (CLI acts as system)
        aaa = {"perms": permission_all_keynames()}

        self.stdout.write(f"pipeline:  {pipeline_name}")
        self.stdout.write(f"schemas:   {', '.join(schema_names)}")
        self.stdout.write(f"dryrun:    {dryrun}")
        self.stdout.write(f"sync:      {sync}")

        # NOTE: local 'handle' (task handle) shadows the handle() method name
        handle, err = submit_pipeline(
            pipeline_name=pipeline_name,
            schema_names=schema_names,
            dryrun=dryrun,
            aaa=aaa,
            owner_type="cli",
            owner_id="system",
            sync=sync,
        )
        if not handle:
            self.stderr.write(f"ERR - {err}")
            return

        self.stdout.write(f"handle:    {handle}")

        if sync:
            # synchronous run: the task is already finished
            dt = DataTask.objects.get(handle=handle)
        else:
            # async run: poll the task row until it reaches a terminal state
            TERMINAL_STATES = {"DONE", "FAILED", "ABORTED"}
            while True:
                dt = DataTask.objects.get(handle=handle)
                self.stdout.write(f"  state={dt.state}  {dt.percent}%  {dt.count}/{dt.total}  {dt.message or ''}")
                if dt.state in TERMINAL_STATES:
                    break
                time.sleep(2)

        # per-schema result summary
        output = dt.output or {}
        for entry in output.get("results", []):
            schemaname = entry.get("schema", "?")
            count_ok = entry.get("count_ok", 0)
            count_discarded = entry.get("count_discarded", 0)
            errors = entry.get("errors", [])
            self.stdout.write(f"  {schemaname:30} ok={count_ok}  discarded={count_discarded}  errors={len(errors)}")
            if verbose and errors:
                for e in errors:
                    self.stderr.write(f"    ERR: {e}")

        self.stdout.write(f"total:     ok={output.get('total_ok', 0)}  discarded={output.get('total_discarded', 0)}  errors={output.get('total_errors', 0)}")
        if dt.state == "FAILED":
            self.stderr.write(f"ERR - task failed: {output.get('error', '')}")


    # ----------------------------------------------------------------
    # conf
    #  ----------------------------------------------------------------
    # sync / export / import

    def cavaliba_conf(self, options):
        """Dispatch the 'conf' action: sync defaults, import, or export."""

        # --conf_sync: seed missing default entries, then stop.
        if options.get("conf_sync", None):
            sync_configuration(verbose=options.get("verbose", False))
            sys.stdout.write("Conf synced\n")
            return

        # --conf_file given -> import that file; otherwise dump current conf.
        if options.get("conf_file", None):
            self.cavaliba_conf_import(options)
        else:
            self.cavaliba_conf_export(options)



    def cavaliba_conf_import(self, options):
        """Load configuration entries from a YAML (default) or JSON file.

        Each entry must provide appname/keyname/value/description/page/order;
        --key restricts the import to a single keyname.

        BUGFIX: a failed parse previously left datalist as None and the
        loop below raised TypeError; we now bail out instead. Messages
        also show the actual filename (they printed a placeholder before).
        """

        verbose = options.get("verbose", False)

        # default to YAML when no format flag is given
        format_yaml = options.get("yaml")
        format_json = options.get("json")
        if not format_yaml and not format_json:
            format_yaml = True

        filename = options.get("conf_file", None)
        key = options.get("key", None)

        self.stdout.write(f"CONF: load file : {filename}\n")

        datalist = None
        if format_yaml:
            with open(filename) as f:
                try:
                    datalist = yaml.load(f, Loader=yaml.SafeLoader)
                except Exception as e:
                    sys.stderr.write(f"Can't load {filename} : {e} ")

        elif format_json:
            with open(filename) as f:
                try:
                    datalist = json.load(f)
                except Exception as e:
                    sys.stderr.write(f"Can't load {filename} : {e}")

        # nothing parsed (error above, or empty file): nothing to do
        if not datalist:
            return

        for item in datalist:
            # expected keys: appname, keyname, value, page, description, order
            appname = item['appname']
            keyname = item['keyname']
            value = item['value']
            description = item['description']
            page = item['page']
            order = item['order']
            # --key filter: only import the matching entry
            if key:
                if key != keyname:
                    continue
            dbentry = set_configuration(
                init_only=False,
                appname=appname,
                keyname=keyname,
                value=value,
                description=description,
                order=order,
                page=page,
                )

            if verbose:
                if dbentry:
                    self.stdout.write(f"ADDED : {appname:10} {keyname:40} {value}" )


    def cavaliba_conf_export(self, options):
        """Dump configuration entries as YAML, JSON, or plain text (default)."""

        key = options.get("key", None)
        format_yaml = options.get("yaml")
        format_json = options.get("json")

        # project the conf objects onto plain dicts, honoring the --key filter
        fields = [ "appname", "keyname", "value", "description", "order", "page"]
        datalist = [
            row
            for row in (model_to_dict(entry, fields=fields) for entry in conf_list())
            if not key or row['keyname'] == key
        ]

        if format_yaml:
            rendered = yaml.dump(datalist, allow_unicode=True, Dumper=MyYamlDumper, sort_keys=False)
            self.stdout.write(f"{rendered}")
        elif format_json:
            rendered = json.dumps(datalist, indent=4, ensure_ascii=False)
            self.stdout.write(f"{rendered}")
        else:
            # default text format: one aligned line per entry
            for row in datalist:
                self.stdout.write(f"{row['appname']:10} {row['keyname']:40} {row['value']}" )





    # ----------------------------------------------------------------
    # log
    #  ----------------------------------------------------------------

    def cavaliba_log(self, options):
        """Show recent log entries, or purge old/all entries."""

        want_purge = options.get("log_purge", False)
        want_purge_all = options.get("log_purge_all", False)

        # listing window: defaults to entries [0, 10)
        first = options.get("first") or 0
        last = options.get("last") or 10
        size = last - first if last > first else 10

        self.stdout.write(f"log entries: {log_get_count()}\n")

        if want_purge:
            deleted = purge()
            self.stdout.write(f"log deleted (old) : {deleted} entries")
        elif want_purge_all:
            deleted = purge_all()
            self.stdout.write(f"log deleted (all) : {deleted} entries")
        else:
            # default: print the selected window of entries
            for entry in log_list(first, size):
                self.stdout.write(f"{entry}")

        self.stdout.write("log done\n")




    # ----------------------------------------------------------------
    # load
    #  ----------------------------------------------------------------
    # load data from CSV/JSON/YAML file to DB

    def cavaliba_load(self, options):
        """Load data objects from CSV/JSON/YAML files (or directories) into
        the DB, optionally preprocessing each object through a pipeline.

        Supports multiple passes (--pass) so relationships between objects
        can resolve once their targets exist, plus --dryrun/--force_action.

        BUGFIX: progress and error messages now show the actual filename
        (they previously printed a literal placeholder); fixed the
        "filtetype" typo; dropped the redundant len(datalist)==0 check.
        """

        verbose = options.get("verbose", True)

        self.stdout.write("=" * 60)

        # line window forwarded to the CSV loader
        first = options.get("first")
        if not first:
            first = 1

        last = options.get("last")
        if not last:
            last = 0
        if last > 0 and last < first:
            self.stderr.write("ERR - invalid first/last values")
            return
        self.stdout.write(f"first/last:  {first}, {last}")

        # optional pipeline used to preprocess / discard objects
        pipeline = None
        pipeline_name = options.get("pipeline", None)
        if pipeline_name:
            self.stdout.write(f"pipeline:    {pipeline_name}")
            pipeline = Pipeline.from_name(pipeline_name)
            if not pipeline:
                self.stderr.write(f"ERR - invalid pipeline {pipeline_name}")
                return
            if verbose:
                pipeline.print()

        # optional override of the _action field found in files
        force_action = options.get("force_action", None)
        if force_action:
            if force_action not in ["create", "init", "update", "delete", "enable", "disable"]:
                self.stderr.write(f"ERR - invalid force_action {force_action}")
                return
            self.stdout.write(f"force_action: {force_action}")

        # number of passes over the file list
        passcount = options.get("pass", 1)
        self.stdout.write(f"pass:        {passcount}")

        dryrun = options.get("dryrun", None)
        self.stdout.write(f"dryrun:      {dryrun}")

        progress = options.get("progress", False)
        self.stdout.write(f"progress:    {progress}")

        # build the ordered file list; directories are expanded
        # (non-recursively) and their contents sorted
        ordered = []
        for fileentry in options['filenames']:
            if os.path.isfile(fileentry):
                ordered.append(fileentry)
            elif os.path.isdir(fileentry):
                for item in os.scandir(fileentry):
                    if item.is_file(follow_symlinks=False):
                        if item.path not in ordered:
                            ordered.append(item.path)
                ordered.sort()
            else:
                self.stderr.write(f"SKIP - Unknown filename: {fileentry}")


        for passcurrent in range(1, passcount + 1):

            self.stdout.write()
            self.stdout.write(f"pass : {passcurrent} / {passcount}")

            for filename in ordered:

                self.stdout.write(f"File: {filename} ...")
                datalist = []

                # LOAD by extension (pipeline used for loading options only)
                if filename.endswith('.csv'):
                    datalist = load_file_csv(filename=filename, pipeline_name=pipeline_name, first=first, last=last)

                elif filename.endswith('.yml') or filename.endswith('.yaml'):
                    datalist = load_file_yaml(filename=filename, pipeline_name=pipeline_name, verbose=verbose)

                elif filename.endswith('.json'):
                    datalist = load_file_json(filename, pipeline_name=pipeline_name, verbose=verbose)

                else:
                    self.stderr.write(f"SKIP - Unknown filetype ({filename})")
                    continue

                # empty or None result: nothing to load from this file
                if not datalist:
                    self.stderr.write(f"SKIP - no data in ({filename})")
                    continue

                self.stdout.write(f"Found: {len(datalist)} objects")

                if verbose:
                    self.stdout.write(json.dumps(datalist, indent=2, ensure_ascii=False))

                # CLI runs with the full permission set
                aaa = {}
                aaa["perms"] = permission_all_keynames()

                count = 0
                for datadict in datalist:

                    count += 1

                    # pipeline may transform the object or discard it
                    if pipeline:
                        status = pipeline.apply(datadict)
                        if status == 'discard':
                            continue

                    if dryrun:
                        result = "dry-run - no write to DB"
                    else:
                        result = load_broker([datadict], aaa=aaa, force_action=force_action)

                    if verbose:
                        self.stdout.write(json.dumps(datadict, indent=2, ensure_ascii=False))
                        self.stdout.write(json.dumps(result, indent=2, ensure_ascii=False))

                self.stdout.write(f"load done for file {filename} - {count} objects\n")

            # END LOOP OVER FILES

        # END LOOP OVER PASS
        self.stdout.write("load done\n")



    # ----------------------------------------------------------------
    # schema
    #  ----------------------------------------------------------------

    def cavaliba_schema(self, options):
        """List available schemas with instance counts, or dump one schema's
        structure as YAML when --schema is given.

        BUGFIX: the "_permission" label previously read "_permisison";
        also removed the unused format flag handling (this action always
        prints text, or the schema's own YAML).
        """

        schema = options.get("schema")

        # no --schema: list everything with counts
        if not schema:

            self.stdout.write("")
            self.stdout.write("Regular Schemas")
            self.stdout.write("---------------")

            for c in DataClass.objects.all():
                count = DataInstance.objects.filter(classname=c.keyname).count()
                self.stdout.write(f"{c.keyname:30} {count}")

            # special structures: user, group, role, permission, home
            self.stdout.write("")
            self.stdout.write("Special structures (not schemas)")
            self.stdout.write("--------------------------------")

            specials = [
                ("IAM/user (_user)", SireneUser.objects.count()),
                ("IAM/group (_group)", SireneGroup.objects.filter(is_role=False).count()),
                ("IAM/role (_role)", SireneGroup.objects.filter(is_role=True).count()),
                ("IAM/permission (_permission)", SirenePermission.objects.count()),
                ("Dashboard entries (_home)", DashboardApp.objects.count()),
            ]
            for name, count in specials:
                self.stdout.write(f"{name:30} {count}")

        # --schema given: dump its structure as YAML
        else:

            schema = Schema.from_name(schema)
            if schema:
                rawdata = schema.to_yaml()
                self.stdout.write(f"{rawdata}")
            else:
                self.stderr.write("Schema not available")


    # ----------------------------------------------------------------
    # get / display an instance
    #  ----------------------------------------------------------------

    def cavaliba_get(self, options):
        """Fetch and print instances of a schema (--schema), optionally a
        single instance (--key), as YAML (default), JSON, or text.

        The pseudo-schemas _user/_group/_role/_permission/_home are served
        from the IAM / dashboard models; any other name goes through the
        generic data-schema exporter.
        """

        schema = options.get("schema", None)
        if not schema:
            self.stderr.write("Please select a schema with --schema")
            return

        # output format: YAML unless another flag is given
        format_yaml = options.get("yaml")
        format_json = options.get("json")
        format_text = options.get("text")
        if not format_yaml and not format_json and not format_text:
            format_yaml = True

        key = options.get("key", None)

        # --refs: comma-separated list of type names to inline as _refs
        refs_raw = options.get("refs", None)
        refs = [r.strip() for r in refs_raw.split(',') if r.strip()] if refs_raw else None
        rev = options.get("rev", None)

        # pagination window used when no --key is given
        first = options.get("first")
        if not first:
            first = 0

        last = options.get("last")
        if not last:
            last = 10

        filedata = []
        datalist = []


        # specific / non- data-schema structs
        # ------------------------------------
        # user
        if schema == "_user":
            if key:
                users = SireneUser.objects.filter(login=key)
            else:
                users = SireneUser.objects.filter()[first:last]
            datalist = user_listdict_format(users)

        # group
        elif schema == "_group":
            if key:
                groups = SireneGroup.objects\
                    .filter(is_role=False, keyname=key)\
                    .prefetch_related("users")\
                    .prefetch_related("subgroups")\
                    .order_by('keyname')
            else:
                groups = SireneGroup.objects\
                    .filter(is_role=False)\
                    .prefetch_related("users")\
                    .prefetch_related("subgroups")\
                    .order_by('keyname')[first:last]
            datalist = group_listdict_format(groups)

        # role (a role is a SireneGroup flagged is_role=True)
        elif schema == "_role":
            if key:
                roles = SireneGroup.objects\
                    .filter(is_role=True, keyname=key)\
                    .prefetch_related("permissions")\
                    .prefetch_related("users")\
                    .prefetch_related("subgroups")\
                    .order_by('keyname')

            else:
                roles = SireneGroup.objects\
                    .filter(is_role=True)\
                    .prefetch_related("permissions")\
                    .prefetch_related("users")\
                    .prefetch_related("subgroups")\
                    .order_by('keyname')[first:last]
            datalist = role_listdict_format(roles)

        # permission
        elif schema == "_permission":
            if key:
                permissions = SirenePermission.objects.filter(keyname=key)
            else:
                permissions = SirenePermission.objects.all()[first:last]
            datalist = permission_listdict_format(permissions)


        # home / dashboard entries
        elif schema == "_home":
            datalist = home_export_dict(keyname=key)


        # regular data-schema structs
        #  ---------------------------
        else:
            classes = [schema]
            if key:
                datalist = listdict_class_export(classes=classes, keyname=key, refs=refs, rev=rev)
            else:
                datalist = listdict_class_export(classes=classes, first=first, last=last, refs=refs, rev=rev)

        # output
        # ------

        if format_yaml:
            filedata = yaml.dump(datalist, allow_unicode=True, Dumper=MyYamlDumper, sort_keys=False)

        elif format_json:
            filedata = json.dumps(datalist, indent=4, ensure_ascii=False)

        elif format_text:
            # text format prints keyname + displayname only (login is the
            # keyname fallback for _user entries)
            for d in datalist:
                login = d.get("login","")
                keyname = d.get("keyname",login)
                displayname = d.get("displayname","")
                print(f"{keyname:30} {displayname:40}")

        else:
            return

        self.stdout.write(f"{filedata}")
