package chroma

type remappingLexer struct {
	lexer  Lexer
	mapper func(Token) []Token
}

// RemappingLexer remaps a token to a set of, potentially empty, tokens.
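//
// A mapper may drop, rewrite, or split tokens. As an illustrative sketch
// (the "base" Lexer and the rules shown here are hypothetical, and the
// split case would additionally need the strings package):
//
//	lexer := RemappingLexer(base, func(t Token) []Token {
//		switch {
//		case t.Type == TextWhitespace:
//			// Drop whitespace entirely.
//			return nil
//		case t.Type == CommentSingle && strings.HasPrefix(t.Value, "// TODO"):
//			// Emit the "// TODO" marker as its own token.
//			return []Token{
//				{CommentSpecial, t.Value[:7]},
//				{CommentSingle, t.Value[7:]},
//			}
//		}
//		return []Token{t}
//	})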
func RemappingLexer(lexer Lexer, mapper func(Token) []Token) Lexer {
	return &remappingLexer{lexer, mapper}
}

func (r *remappingLexer) AnalyseText(text string) float32 {
	return r.lexer.AnalyseText(text)
}

func (r *remappingLexer) SetAnalyser(analyser func(text string) float32) Lexer {
	r.lexer.SetAnalyser(analyser)
	return r
}

func (r *remappingLexer) SetRegistry(registry *LexerRegistry) Lexer {
	r.lexer.SetRegistry(registry)
	return r
}

func (r *remappingLexer) Config() *Config {
	return r.lexer.Config()
}

func (r *remappingLexer) Tokenise(options *TokeniseOptions, text string) (Iterator, error) {
	it, err := r.lexer.Tokenise(options, text)
	if err != nil {
		return nil, err
	}
	var buffered []Token
	return func() Token {
		for {
			// Drain any tokens produced by the previous remapping before
			// pulling the next token from the underlying iterator.
			if len(buffered) > 0 {
				t := buffered[0]
				buffered = buffered[1:]
				return t
			}
			t := it()
			if t == EOF {
				return t
			}
			buffered = r.mapper(t)
		}
	}, nil
}

// TypeMapping defines type maps for the TypeRemappingLexer.
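//
// An entry with an empty Words slice remaps every token of type From,
// while an entry with Words remaps only tokens whose value matches one of
// the words; word matches take precedence over the catch-all. As an
// illustrative sketch, remapping all NameVariable tokens to Name except
// "self", which becomes NameBuiltinPseudo:
//
//	mapping := TypeMapping{
//		{NameVariable, Name, nil},
//		{NameVariable, NameBuiltinPseudo, []string{"self"}},
//	}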
type TypeMapping []struct {
	From, To TokenType
	Words    []string
}

// TypeRemappingLexer remaps types of tokens coming from a parent Lexer.
//
// e.g. Map "defvaralias" tokens of type NameVariable to NameFunction:
//
//	mapping := TypeMapping{
//		{NameVariable, NameFunction, []string{"defvaralias"}},
//	}
//	lexer = TypeRemappingLexer(lexer, mapping)
func TypeRemappingLexer(lexer Lexer, mapping TypeMapping) Lexer {
	// Lookup table for fast remapping.
	lut := map[TokenType]map[string]TokenType{}
	for _, rt := range mapping {
		km, ok := lut[rt.From]
		if !ok {
			km = map[string]TokenType{}
			lut[rt.From] = km
		}
		if len(rt.Words) == 0 {
			// An empty word list acts as a catch-all for the From type.
			km[""] = rt.To
		} else {
			for _, word := range rt.Words {
				km[word] = rt.To
			}
		}
	}
	return RemappingLexer(lexer, func(t Token) []Token {
		if km, ok := lut[t.Type]; ok {
			// An exact match on the token's value takes precedence over
			// the catch-all entry.
			if tt, ok := km[t.Value]; ok {
				t.Type = tt
			} else if tt, ok := km[""]; ok {
				t.Type = tt
			}
		}
		return []Token{t}
	})
}