package chroma

import (
	"bytes"
)

type delegatingLexer struct {
	root     Lexer
	language Lexer
}

// DelegatingLexer combines two lexers to handle the common case of a language embedded inside another, such as PHP
// inside HTML or PHP inside plain text.
//
// It takes two lexers as arguments: a root lexer and a language lexer. First everything is scanned using the language
// lexer, which must return "Other" for unrecognised tokens. Then all "Other" tokens are lexed using the root lexer.
// Finally, these two sets of tokens are merged.
//
// The lexers from the template lexer package use this base lexer.
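//
// A minimal usage sketch (an assumption, not from this file: it presumes the
// lexers subpackage resolves "html" and "php" via lexers.Get, and that the
// PHP lexer emits Other outside <?php blocks):
//
//	lexer := DelegatingLexer(lexers.Get("html"), lexers.Get("php"))
//	iterator, err := lexer.Tokenise(nil, `<b><?php echo "hi"; ?></b>`)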
func DelegatingLexer(root Lexer, language Lexer) Lexer {
	return &delegatingLexer{
		root:     root,
		language: language,
	}
}

func (d *delegatingLexer) AnalyseText(text string) float32 {
	return d.root.AnalyseText(text)
}

func (d *delegatingLexer) SetAnalyser(analyser func(text string) float32) Lexer {
	d.root.SetAnalyser(analyser)
	return d
}

func (d *delegatingLexer) SetRegistry(registry *LexerRegistry) Lexer {
	d.root.SetRegistry(registry)
	d.language.SetRegistry(registry)
	return d
}

func (d *delegatingLexer) Config() *Config {
	return d.language.Config()
}

// An insertion is the character range where language tokens should be inserted.
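//
// For example (an illustrative sketch): lexing `text<?php $x ?>more` with a
// PHP language lexer that emits Other outside <?php blocks yields
// Other("text"), the PHP tokens, then Other("more"). That records a single
// insertion{start: 4, end: 15} holding the PHP tokens, while the root lexer
// sees only the concatenated string "textmore".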
type insertion struct {
	start, end int
	tokens     []Token
}

// Tokenise lexes text with the language lexer, then lexes the concatenated
// "Other" tokens with the root lexer and merges the two token streams.
func (d *delegatingLexer) Tokenise(options *TokeniseOptions, text string) (Iterator, error) { // nolint: gocognit
	tokens, err := Tokenise(Coalesce(d.language), options, text)
	if err != nil {
		return nil, err
	}
	// Compute insertions and gather "Other" tokens.
	others := &bytes.Buffer{}
	insertions := []*insertion{}
	var insert *insertion
	offset := 0
	var last Token
	for _, t := range tokens {
		if t.Type == Other {
			// A transition from language tokens back to "Other" closes the
			// current insertion at this offset.
			if last != EOF && insert != nil && last.Type != Other {
				insert.end = offset
			}
			others.WriteString(t.Value)
		} else {
			// A transition from "Other" to language tokens opens a new insertion.
			if last == EOF || last.Type == Other {
				insert = &insertion{start: offset}
				insertions = append(insertions, insert)
			}
			insert.tokens = append(insert.tokens, t)
		}
		last = t
		offset += len(t.Value)
	}

	if len(insertions) == 0 {
		return d.root.Tokenise(options, text)
	}

	// Lex the other tokens.
	rootTokens, err := Tokenise(Coalesce(d.root), options, others.String())
	if err != nil {
		return nil, err
	}

	// Interleave the two sets of tokens.
	var out []Token
	offset = 0 // Offset into text.
	tokenIndex := 0
	nextToken := func() Token {
		if tokenIndex >= len(rootTokens) {
			return EOF
		}
		t := rootTokens[tokenIndex]
		tokenIndex++
		return t
	}
	insertionIndex := 0
	nextInsertion := func() *insertion {
		if insertionIndex >= len(insertions) {
			return nil
		}
		i := insertions[insertionIndex]
		insertionIndex++
		return i
	}
	t := nextToken()
	i := nextInsertion()
	for t != EOF || i != nil {
		// fmt.Printf("%d->%d:%q   %d->%d:%q\n", offset, offset+len(t.Value), t.Value, i.start, i.end, Stringify(i.tokens...))
		if t == EOF || (i != nil && i.start < offset+len(t.Value)) {
			// The next insertion starts inside the current root token: split
			// the token, emit the left half, then splice in the insertion.
			var l Token
			l, t = splitToken(t, i.start-offset)
			if l != EOF {
				out = append(out, l)
				offset += len(l.Value)
			}
			out = append(out, i.tokens...)
			offset += i.end - i.start
			if t == EOF {
				t = nextToken()
			}
			i = nextInsertion()
		} else {
			out = append(out, t)
			offset += len(t.Value)
			t = nextToken()
		}
	}
	return Literator(out...), nil
}

// splitToken splits a token in two at the given byte offset into its value,
// returning EOF in place of an empty half. For example, splitting
// Token{Other, "abcd"} at offset 2 yields tokens with values "ab" and "cd".
func splitToken(t Token, offset int) (l Token, r Token) {
	if t == EOF {
		return EOF, EOF
	}
	if offset == 0 {
		return EOF, t
	}
	if offset == len(t.Value) {
		return t, EOF
	}
	l = t.Clone()
	r = t.Clone()
	l.Value = l.Value[:offset]
	r.Value = r.Value[offset:]
	return
}