package chroma

import (
	"fmt"
)

// An Emitter takes group matches and returns tokens.
type Emitter interface {
	// Emit tokens for the given regex groups.
	//
	// groups[0] is the full match; groups[1:] are the submatches.
	Emit(groups []string, state *LexerState) Iterator
}

// SerialisableEmitter is an Emitter that can be serialised and deserialised to/from JSON.
type SerialisableEmitter interface {
	Emitter
	// EmitterKind returns the unique kind identifier used during serialisation.
	EmitterKind() string
}

// EmitterFunc is a function that is an Emitter.
type EmitterFunc func(groups []string, state *LexerState) Iterator

// Emit tokens for groups.
func ( EmitterFunc) ( []string,  *LexerState) Iterator {
	return (, )
}

type Emitters []Emitter

// byGroupsEmitter emits one token stream per regex group, using the
// positionally-matching Emitter for each group.
type byGroupsEmitter struct {
	Emitters
}

// ByGroups emits a token for each matching group in the rule's regex.
func ( ...Emitter) Emitter {
	return &byGroupsEmitter{Emitters: }
}

func ( *byGroupsEmitter) () string { return "bygroups" }

func ( *byGroupsEmitter) ( []string,  *LexerState) Iterator {
	 := make([]Iterator, 0, len()-1)
	if len(.Emitters) != len()-1 {
		 = append(, Error.Emit(, ))
		// panic(errors.Errorf("number of groups %q does not match number of emitters %v", groups, emitters))
	} else {
		for ,  := range [1:] {
			if .Emitters[] != nil {
				 = append(, .Emitters[].Emit([]string{}, ))
			}
		}
	}
	return Concaterator(...)
}

// ByGroupNames emits a token for each named matching group in the rule's regex.
func ( map[string]Emitter) Emitter {
	return EmitterFunc(func( []string,  *LexerState) Iterator {
		 := make([]Iterator, 0, len(.NamedGroups)-1)
		if len(.NamedGroups)-1 == 0 {
			if ,  := [`0`];  {
				 = append(, .Emit(, ))
			} else {
				 = append(, Error.Emit(, ))
			}
		} else {
			 := .Rules[.State][.Rule].Regexp
			for  := 1;  < len(.NamedGroups); ++ {
				 := .GroupNameFromNumber()
				 := .NamedGroups[]
				if ,  := [];  {
					if  != nil {
						 = append(, .Emit([]string{}, ))
					}
				} else {
					 = append(, Error.Emit([]string{}, ))
				}
			}
		}
		return Concaterator(...)
	})
}

// UsingByGroup emits tokens for the matched groups in the regex using a
// sublexer. Used when lexing code blocks where the name of a sublexer is
// contained within the block, for example on a Markdown text block or SQL
// language block.
//
// An attempt to load the sublexer will be made using the captured value from
// the text of the matched sublexerNameGroup. If a sublexer matching the
// sublexerNameGroup is available, then tokens for the matched codeGroup will
// be emitted using the sublexer. Otherwise, if no sublexer is available, then
// tokens will be emitted from the passed emitter.
//
// Example:
//
//	var Markdown = internal.Register(MustNewLexer(
//		&Config{
//			Name:      "markdown",
//			Aliases:   []string{"md", "mkd"},
//			Filenames: []string{"*.md", "*.mkd", "*.markdown"},
//			MimeTypes: []string{"text/x-markdown"},
//		},
//		Rules{
//			"root": {
//				{"^(```)(\\w+)(\\n)([\\w\\W]*?)(^```$)",
//					UsingByGroup(
//						2, 4,
//						String, String, String, Text, String,
//					),
//					nil,
//				},
//			},
//		},
//	))
//
// See the lexers/markdown.go for the complete example.
//
// Note: panic's if the number of emitters does not equal the number of matched
// groups in the regex.
func (,  int,  ...Emitter) Emitter {
	return &usingByGroup{
		SublexerNameGroup: ,
		CodeGroup:         ,
		Emitters:          ,
	}
}

// usingByGroup is the serialisable implementation behind UsingByGroup:
// the captured SublexerNameGroup selects a sublexer used to tokenise the
// CodeGroup; all other groups use the positional Emitters.
type usingByGroup struct {
	SublexerNameGroup int      `xml:"sublexer_name_group"`
	CodeGroup         int      `xml:"code_group"`
	Emitters          Emitters `xml:"emitters"`
}

func ( *usingByGroup) () string { return "usingbygroup" }
func ( *usingByGroup) ( []string,  *LexerState) Iterator {
	// bounds check
	if len(.Emitters) != len()-1 {
		panic("UsingByGroup expects number of emitters to be the same as len(groups)-1")
	}

	// grab sublexer
	 := .Registry.Get([.SublexerNameGroup])

	// build iterators
	 := make([]Iterator, len()-1)
	for ,  := range [1:] {
		if  == .CodeGroup-1 &&  != nil {
			var  error
			[],  = .Tokenise(nil, [.CodeGroup])
			if  != nil {
				panic()
			}
		} else if .Emitters[] != nil {
			[] = .Emitters[].Emit([]string{}, )
		}
	}
	return Concaterator(...)
}

// UsingLexer returns an Emitter that uses a given Lexer for parsing and emitting.
//
// This Emitter is not serialisable.
func ( Lexer) Emitter {
	return EmitterFunc(func( []string,  *LexerState) Iterator {
		,  := .Tokenise(&TokeniseOptions{State: "root", Nested: true}, [0])
		if  != nil {
			panic()
		}
		return 
	})
}

// usingEmitter is the serialisable implementation behind Using: it looks up
// the named lexer in the LexerRegistry at emit time.
type usingEmitter struct {
	Lexer string `xml:"lexer,attr"`
}

func ( *usingEmitter) () string { return "using" }

func ( *usingEmitter) ( []string,  *LexerState) Iterator {
	if .Registry == nil {
		panic(fmt.Sprintf("no LexerRegistry available for Using(%q)", .Lexer))
	}
	 := .Registry.Get(.Lexer)
	if  == nil {
		panic(fmt.Sprintf("no such lexer %q", .Lexer))
	}
	,  := .Tokenise(&TokeniseOptions{State: "root", Nested: true}, [0])
	if  != nil {
		panic()
	}
	return 
}

// Using returns an Emitter that uses a given Lexer reference for parsing and emitting.
//
// The referenced lexer must be stored in the same LexerRegistry.
func ( string) Emitter {
	return &usingEmitter{Lexer: }
}

// usingSelfEmitter is the serialisable implementation behind UsingSelf: it
// re-enters the current lexer at the named state.
type usingSelfEmitter struct {
	State string `xml:"state,attr"`
}

func ( *usingSelfEmitter) () string { return "usingself" }

func ( *usingSelfEmitter) ( []string,  *LexerState) Iterator {
	,  := .Lexer.Tokenise(&TokeniseOptions{State: .State, Nested: true}, [0])
	if  != nil {
		panic()
	}
	return 
}

// UsingSelf is like Using, but uses the current Lexer.
func ( string) Emitter {
	return &usingSelfEmitter{}
}