/*
Copyright © 2025 cavaliba.com
*/
package cmd

import (
	"bytes"
	"encoding/csv"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"os"
	"path/filepath"
	"strings"
	"time"

	"github.com/spf13/cobra"
	"github.com/spf13/viper"
)

// Local flags for import command.
// batchSize controls chunked CSV import: 0 (the default) disables batching
// and uploads the whole file in one request; >0 streams the CSV in batches
// of that many rows. Bound to the --batch-size flag in init().
var batchSize int

// CSVChunk represents a batch of CSV rows with header.
// Each chunk carries its own copy of the header so it can be serialized
// into a standalone CSV document for a single API upload (see chunkToCSV).
type CSVChunk struct {
	Header    []string   // CSV header row (same slice shared by every chunk)
	Rows      [][]string // Batch of data rows (header excluded)
	ChunkNum  int        // Chunk number (1-based)
	TotalRows int        // Rows in this chunk (== len(Rows))
	IsLast    bool       // True when EOF was seen while filling this chunk; a file whose row count is an exact multiple of the batch size yields a final chunk with IsLast == false
}

// ChunkIterator handles CSV file iteration, yielding CSVChunk batches from
// Next until io.EOF. Not safe for concurrent use. Callers must invoke
// Close to release the underlying file handle.
type ChunkIterator struct {
	file      *os.File    // open CSV file; owned by the iterator
	reader    *csv.Reader // configured with the global delimiter flag
	header    []string    // header row, read once at construction
	batchSize int         // maximum rows per chunk
	chunkNum  int         // chunks emitted so far (0 before the first Next)
	eof       bool        // set once the reader has returned io.EOF
}

// ImportSummary aggregates import results across chunks.
// Chunk counters are maintained client-side; row-level success/failure
// counts come from the API's count_ok / count_ko response fields.
type ImportSummary struct {
	TotalChunks   int // chunks read from the input file
	SuccessChunks int // chunks whose HTTP call completed
	FailedChunks  int // chunks that failed conversion, option handling, or transport
	TotalRows     int // data rows read from the input file
	SuccessRows   int // From API responses (sum of count_ok)
	FailedRows    int // From API responses (sum of count_ko)
	Errors        []ChunkError
	Duration      time.Duration // wall-clock time of the whole import
}

// ChunkError captures chunk-level errors (conversion or transport
// failures), as opposed to per-row rejections reported by the API.
type ChunkError struct {
	ChunkNum int    // 1-based chunk number
	HTTPCode int    // HTTP status code; 0 when no request was made
	Message  string // underlying error text
}

// NewCSVChunkIterator creates a new chunk iterator for CSV files.
//
// It opens the file at path, configures a CSV reader honoring the global
// --delimiter flag, and consumes the header row up front so every chunk
// can later be serialized as a standalone CSV document. The caller owns
// the returned iterator and must call Close when done.
//
// Returns an error when the file cannot be opened, is empty (no header
// row), or the header is malformed CSV.
func NewCSVChunkIterator(path string, batchSize int) (*ChunkIterator, error) {
	file, err := os.Open(path)
	if err != nil {
		return nil, fmt.Errorf("failed to open file: %w", err)
	}

	reader := csv.NewReader(file)
	// Use delimiter from global flag. Only the first byte is honored, so
	// the flag is expected to hold a single ASCII separator such as ";".
	if delimiter != "" {
		reader.Comma = rune(delimiter[0])
	}
	reader.FieldsPerRecord = -1 // allow variable number of fields per row
	reader.LazyQuotes = true    // tolerate bare/unescaped quotes

	// Read header row; without it chunks could not be reassembled into
	// self-contained CSV documents.
	header, err := reader.Read()
	if err != nil {
		file.Close()
		if err == io.EOF {
			return nil, fmt.Errorf("file is empty or has no header")
		}
		return nil, fmt.Errorf("failed to read header: %w", err)
	}

	return &ChunkIterator{
		file:      file,
		reader:    reader,
		header:    header,
		batchSize: batchSize,
		chunkNum:  0,
		eof:       false,
	}, nil
}

// Next returns the next chunk of up to batchSize rows, or io.EOF once the
// file is exhausted.
//
// IsLast is set only when EOF was observed while filling the chunk; a file
// whose data-row count is an exact multiple of the batch size produces a
// final chunk with IsLast == false, followed by io.EOF on the next call.
func (ci *ChunkIterator) Next() (*CSVChunk, error) {
	if ci.eof {
		return nil, io.EOF
	}

	rows := make([][]string, 0, ci.batchSize)
	for len(rows) < ci.batchSize && !ci.eof {
		record, err := ci.reader.Read()
		switch {
		case err == io.EOF:
			ci.eof = true
		case err != nil:
			return nil, fmt.Errorf("failed to read CSV record: %w", err)
		default:
			rows = append(rows, record)
		}
	}

	if len(rows) == 0 {
		// Header-only file, or the previous chunk consumed the final row.
		return nil, io.EOF
	}

	ci.chunkNum++
	return &CSVChunk{
		Header:    ci.header,
		Rows:      rows,
		ChunkNum:  ci.chunkNum,
		TotalRows: len(rows),
		IsLast:    ci.eof,
	}, nil
}

// Close releases the underlying file handle. It is a no-op when no file
// was ever opened.
func (ci *ChunkIterator) Close() error {
	if ci.file == nil {
		return nil
	}
	return ci.file.Close()
}

// chunkToCSV converts a CSVChunk back to CSV string format, writing the
// header first so each chunk is a standalone CSV document the import API
// can ingest independently.
//
// Returns an error when the csv writer rejects a record or reports a
// deferred write error on Flush.
func chunkToCSV(chunk *CSVChunk) (string, error) {
	var buf bytes.Buffer
	writer := csv.NewWriter(&buf)

	// Use delimiter from global flag (first byte only, mirroring the
	// reader configuration in NewCSVChunkIterator).
	if delimiter != "" {
		writer.Comma = rune(delimiter[0])
	}

	// Write header
	if err := writer.Write(chunk.Header); err != nil {
		return "", fmt.Errorf("failed to write header: %w", err)
	}

	// Write data rows
	for _, row := range chunk.Rows {
		if err := writer.Write(row); err != nil {
			return "", fmt.Errorf("failed to write row: %w", err)
		}
	}

	// Flush pushes buffered output into buf; Error surfaces any write
	// error that Flush swallowed.
	writer.Flush()
	if err := writer.Error(); err != nil {
		return "", fmt.Errorf("CSV writer error: %w", err)
	}

	return buf.String(), nil
}

// parseImportResponse parses the Cavaliba API JSON response.
// Expected format:
//
//	{
//	  "count": 5,
//	  "count_ok": 0,
//	  "count_ko": 5,
//	  "errors": ["error message 1", "error message 2", ...]
//	}
//
// It returns the created (count_ok) and failed (count_ko) record counts
// plus any error messages. The updated counter is not supplied by the API
// and is always 0. A JSON parse failure is reported via err with all
// counters zeroed; callers treat this as non-fatal.
func parseImportResponse(body string) (created, updated, errors int, errorMessages []string, err error) {
	var payload map[string]any
	if unmarshalErr := json.Unmarshal([]byte(body), &payload); unmarshalErr != nil {
		return 0, 0, 0, nil, unmarshalErr
	}

	// JSON numbers decode as float64; missing or non-numeric fields
	// silently count as 0.
	asInt := func(key string) int {
		if v, ok := payload[key].(float64); ok {
			return int(v)
		}
		return 0
	}
	created = asInt("count_ok") // successfully created/updated records
	errors = asInt("count_ko")  // failed records

	// Collect string entries from the optional "errors" array, skipping
	// anything that is not a string.
	if list, ok := payload["errors"].([]any); ok {
		for _, item := range list {
			if msg, isString := item.(string); isString {
				errorMessages = append(errorMessages, msg)
			}
		}
	}

	return created, 0, errors, errorMessages, nil
}

// printImportSummary displays the final aggregated import summary.
// Human-readable statistics go to stderr; a JSON object mirroring the API
// response shape ({count, count_ok, count_ko}) goes to stdout via
// PrintOutput so scripts can parse it.
func printImportSummary(summary ImportSummary) {
	w := os.Stderr

	fmt.Fprintln(w, "\nImport Summary:")
	fmt.Fprintln(w, "--------------")
	fmt.Fprintf(w, "Total chunks:      %d\n", summary.TotalChunks)
	fmt.Fprintf(w, "Successful chunks: %d\n", summary.SuccessChunks)
	fmt.Fprintf(w, "Failed chunks:     %d\n", summary.FailedChunks)
	fmt.Fprintf(w, "Total rows:        %d\n", summary.TotalRows)
	fmt.Fprintf(w, "Success rows:      %d\n", summary.SuccessRows)
	fmt.Fprintf(w, "Error rows:        %d\n", summary.FailedRows)
	fmt.Fprintf(w, "Duration:          %.1fs\n", summary.Duration.Seconds())

	// Per-chunk failure details, if any chunk failed outright.
	if len(summary.Errors) > 0 {
		fmt.Fprintln(w, "\nChunk Errors:")
		for _, chunkErr := range summary.Errors {
			fmt.Fprintf(w, "  Chunk %d: HTTP %d - %s\n", chunkErr.ChunkNum, chunkErr.HTTPCode, chunkErr.Message)
		}
	}

	// JSON-formatted summary on stdout (matching API response format).
	PrintOutput(fmt.Sprintf("{\n  \"count\": %d,\n  \"count_ok\": %d,\n  \"count_ko\": %d\n}",
		summary.TotalRows, summary.SuccessRows, summary.FailedRows))
}

// importCSVBatched handles chunked CSV import with batch processing.
//
// The file at filepath is streamed in chunks of batchSize rows; each chunk
// is re-serialized as a standalone CSV document (header included) and
// POSTed to the "import/" endpoint with the given contentType. Progress is
// written to stderr, per-chunk API results are aggregated into an
// ImportSummary, and the process exits 1 when no chunks were processed or
// every chunk failed; partial success exits 0.
func importCSVBatched(filepath string, batchSize int, contentType string) {
	// Validate batch size
	if batchSize <= 0 {
		fmt.Fprintln(os.Stderr, "Error: batch-size must be > 0")
		os.Exit(1)
	}
	if batchSize > 1000 {
		fmt.Fprintf(os.Stderr, "Warning: batch-size > 1000 (%d) may cause memory issues\n", batchSize)
	}

	// Create iterator
	iter, err := NewCSVChunkIterator(filepath, batchSize)
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error: %s\n", err)
		os.Exit(1)
	}
	// NOTE(review): this deferred Close does not run on the os.Exit calls
	// below; acceptable since process exit releases the handle anyway.
	defer iter.Close()

	// Initialize summary
	startTime := time.Now()
	summary := ImportSummary{
		Errors: []ChunkError{},
	}

	// In verbose mode, show the target once up front. The same target is
	// rebuilt per chunk below, with the chunk body attached.
	if verbose {
		target := APITarget{
			url:            viper.GetString("url") + "import/",
			method:         http.MethodPost,
			ssl_skipverify: viper.GetBool("ssl_skipverify"),
			content_type:   contentType,
		}
		PrintVerboseTarget(target)
	}

	fmt.Fprintln(os.Stderr, "Processing")
	fmt.Fprintln(os.Stderr, "----------")
	fmt.Fprintf(os.Stderr, "Importing %s in batches of %d rows...\n\n", filepath, batchSize)

	// Iterate over chunks. Chunk-level failures are recorded in the
	// summary and the loop continues with the next chunk (best-effort),
	// but a malformed CSV record aborts the whole import.
	for {
		chunk, err := iter.Next()
		if err == io.EOF {
			break
		}
		if err != nil {
			fmt.Fprintf(os.Stderr, "Error reading chunk: %s\n", err)
			os.Exit(1)
		}

		summary.TotalChunks++
		summary.TotalRows += chunk.TotalRows

		// Convert chunk to CSV string (header + rows)
		csvData, err := chunkToCSV(chunk)
		if err != nil {
			fmt.Fprintf(os.Stderr, "Chunk %d: FAILED (conversion error: %s)\n", chunk.ChunkNum, err)
			summary.FailedChunks++
			summary.Errors = append(summary.Errors, ChunkError{
				ChunkNum: chunk.ChunkNum,
				HTTPCode: 0, // no request was made
				Message:  err.Error(),
			})
			continue
		}

		// POST to API
		target := APITarget{
			url:            viper.GetString("url") + "import/",
			method:         http.MethodPost,
			ssl_skipverify: viper.GetBool("ssl_skipverify"),
			content_type:   contentType,
			body:           csvData,
		}

		err = AppendGlobalOptions(&target)
		if err != nil {
			fmt.Fprintf(os.Stderr, "Chunk %d: FAILED (%s)\n", chunk.ChunkNum, err)
			summary.FailedChunks++
			summary.Errors = append(summary.Errors, ChunkError{
				ChunkNum: chunk.ChunkNum,
				HTTPCode: 0, // no request was made
				Message:  err.Error(),
			})
			continue
		}

		// Display progress indicator
		fmt.Fprintf(os.Stderr, "Chunk %-3d - %d rows - ", chunk.ChunkNum, chunk.TotalRows)

		result, err := CallAPI(target)
		if err != nil {
			fmt.Fprintf(os.Stderr, "FAILED (%s)\n", err)
			summary.FailedChunks++
			summary.Errors = append(summary.Errors, ChunkError{
				ChunkNum: chunk.ChunkNum,
				// NOTE(review): on transport errors result is presumably
				// zero-valued, making this 0 — confirm CallAPI's contract.
				HTTPCode: result.http_code,
				Message:  err.Error(),
			})
			continue
		}

		// Parse response: created/apiErrors come from count_ok/count_ko.
		created, _, apiErrors, errorMessages, parseErr := parseImportResponse(result.body)
		if parseErr != nil {
			// Non-fatal, just log
			if verbose {
				fmt.Fprintf(os.Stderr, "Warning: failed to parse response: %s\n", parseErr)
			}
		}

		// A chunk counts as successful once the HTTP call returned, even
		// if the API rejected individual rows (tracked in FailedRows).
		summary.SuccessChunks++
		summary.SuccessRows += created
		summary.FailedRows += apiErrors

		fmt.Fprintf(os.Stderr, "DONE - success: %-4d - errors: %-4d\n", created, apiErrors)

		// Display error messages in verbose mode
		if verbose && len(errorMessages) > 0 {
			for _, errMsg := range errorMessages {
				fmt.Fprintf(os.Stderr, "    %s\n", errMsg)
			}
		}
	}

	// Calculate duration
	summary.Duration = time.Since(startTime)

	// Print summary
	printImportSummary(summary)

	// Exit code logic: fail when nothing was processed or everything
	// failed; partial success exits 0.
	if summary.TotalChunks == 0 {
		fmt.Fprintln(os.Stderr, "No chunks were processed")
		os.Exit(1)
	}
	if summary.FailedChunks == summary.TotalChunks {
		os.Exit(1)
	}
}

// importCmd represents the import command.
//
// It validates the --file flag, detects the payload content type from the
// file extension, and either routes CSV files to the chunked import path
// (when --batch-size > 0) or uploads the whole file in a single POST to
// the "import/" endpoint.
var importCmd = &cobra.Command{
	Use:   "import",
	Short: "Import data into a Cavaliba instance",
	Long:  `Import schemas and instances from YAML, JSON, or CSV files into a Cavaliba instance`,
	Run: func(cmd *cobra.Command, args []string) {

		// Errors go to stderr so stdout stays machine-parseable,
		// consistent with the batched import path.
		if file == "" {
			fmt.Fprintln(os.Stderr, "Error: --file flag is required for import command")
			os.Exit(1)
		}

		// Check if file exists
		if _, err := os.Stat(file); os.IsNotExist(err) {
			fmt.Fprintf(os.Stderr, "Error: file not found: %s\n", file)
			os.Exit(1)
		}

		// Detect content type based on file extension
		ext := strings.ToLower(filepath.Ext(file))
		var contentType string
		switch ext {
		case ".yaml", ".yml":
			contentType = "application/x-yaml"
		case ".json":
			contentType = "application/json"
		case ".csv":
			contentType = "text/csv"
		default:
			fmt.Fprintf(os.Stderr, "Error: unsupported file type: %s (supported: .yaml, .yml, .json, .csv)\n", ext)
			os.Exit(1)
		}

		// Route to batch mode if enabled for CSV
		if ext == ".csv" && batchSize > 0 {
			importCSVBatched(file, batchSize, contentType)
			return
		}

		// Legacy single-file import path: read the whole file into memory
		// and POST it in one request.
		fileContent, err := os.ReadFile(file)
		if err != nil {
			fmt.Fprintf(os.Stderr, "Error: failed to read file: %s\n", err)
			os.Exit(1)
		}

		endpoint := "import/"

		target := APITarget{
			url:            viper.GetString("url") + endpoint,
			method:         http.MethodPost,
			ssl_skipverify: viper.GetBool("ssl_skipverify"),
			content_type:   contentType,
			body:           string(fileContent),
		}

		err = AppendGlobalOptions(&target)
		if err != nil {
			fmt.Fprintln(os.Stderr, err)
			os.Exit(1)
		}

		PrintVerboseTarget(target)

		result, err := CallAPI(target)
		if err != nil {
			// Exit non-zero on API failure (was os.Exit(0), which reported
			// success to the shell), consistent with every other error path.
			PrintError(result, err)
			os.Exit(1)
		}

		PrintVerboseResult(result)
		PrintOutput(result.body)

	},
}

func init() {
	rootCmd.AddCommand(importCmd)

	// Add import-specific flags
	importCmd.Flags().IntVar(&batchSize, "batch-size", 0, "CSV batch size for chunked import (0=disabled, default: 0)")
}
