package chroma

import (
	"path/filepath"
	"sort"
	"strings"
)

var (
	// ignoredSuffixes are filename suffixes stripped when matching lexer
	// filename globs in Match: a glob is also tried with each suffix
	// appended, so e.g. "Makefile.bak" still matches the Makefile lexer.
	// Declared as an array (not a slice) so Match can range over
	// &ignoredSuffixes without allocating a slice header.
	ignoredSuffixes = [...]string{
		// Editor backups
		"~", ".bak", ".old", ".orig",
		// Debian and derivatives apt/dpkg/ucf backups
		".dpkg-dist", ".dpkg-old", ".ucf-dist", ".ucf-new", ".ucf-old",
		// Red Hat and derivatives rpm backups
		".rpmnew", ".rpmorig", ".rpmsave",
		// Build system input/template files
		".in",
	}
)

// LexerRegistry is a registry of Lexers.
type LexerRegistry struct {
	Lexers  Lexers           // all registered lexers, in registration order
	byName  map[string]Lexer // lookup by configured Name (exact and lowercased)
	byAlias map[string]Lexer // lookup by alias (exact and lowercased)
}

// NewLexerRegistry creates a new LexerRegistry of Lexers.
func () *LexerRegistry {
	return &LexerRegistry{
		byName:  map[string]Lexer{},
		byAlias: map[string]Lexer{},
	}
}

// Names of all lexers, optionally including aliases.
func ( *LexerRegistry) ( bool) []string {
	 := []string{}
	for ,  := range .Lexers {
		 := .Config()
		 = append(, .Name)
		if  {
			 = append(, .Aliases...)
		}
	}
	sort.Strings()
	return 
}

// Get a Lexer by name, alias or file extension.
func ( *LexerRegistry) ( string) Lexer {
	if  := .byName[];  != nil {
		return 
	}
	if  := .byAlias[];  != nil {
		return 
	}
	if  := .byName[strings.ToLower()];  != nil {
		return 
	}
	if  := .byAlias[strings.ToLower()];  != nil {
		return 
	}

	 := PrioritisedLexers{}
	// Try file extension.
	if  := .Match("filename." + );  != nil {
		 = append(, )
	}
	// Try exact filename.
	if  := .Match();  != nil {
		 = append(, )
	}
	if len() == 0 {
		return nil
	}
	sort.Sort()
	return [0]
}

// MatchMimeType attempts to find a lexer for the given MIME type.
func ( *LexerRegistry) ( string) Lexer {
	 := PrioritisedLexers{}
	for ,  := range .Lexers {
		for ,  := range .Config().MimeTypes {
			if  ==  {
				 = append(, )
			}
		}
	}
	if len() != 0 {
		sort.Sort()
		return [0]
	}
	return nil
}

// Match returns the first lexer matching filename.
//
// Note that this iterates over all file patterns in all lexers, so is not fast.
func ( *LexerRegistry) ( string) Lexer {
	 = filepath.Base()
	 := PrioritisedLexers{}
	// First, try primary filename matches.
	for ,  := range .Lexers {
		 := .Config()
		for ,  := range .Filenames {
			,  := filepath.Match(, )
			if  != nil { // nolint
				panic()
			} else if  {
				 = append(, )
			} else {
				for ,  := range &ignoredSuffixes {
					,  := filepath.Match(+, )
					if  != nil {
						panic()
					} else if  {
						 = append(, )
						break
					}
				}
			}
		}
	}
	if len() > 0 {
		sort.Sort()
		return [0]
	}
	 = nil
	// Next, try filename aliases.
	for ,  := range .Lexers {
		 := .Config()
		for ,  := range .AliasFilenames {
			,  := filepath.Match(, )
			if  != nil { // nolint
				panic()
			} else if  {
				 = append(, )
			} else {
				for ,  := range &ignoredSuffixes {
					,  := filepath.Match(+, )
					if  != nil {
						panic()
					} else if  {
						 = append(, )
						break
					}
				}
			}
		}
	}
	if len() > 0 {
		sort.Sort()
		return [0]
	}
	return nil
}

// Analyse text content and return the "best" lexer..
func ( *LexerRegistry) ( string) Lexer {
	var  Lexer
	 := float32(0.0)
	for ,  := range .Lexers {
		if ,  := .(Analyser);  {
			 := .AnalyseText()
			if  >  {
				 = 
				 = 
			}
		}
	}
	return 
}

// Register a Lexer with the LexerRegistry. If the lexer is already registered
// it will be replaced.
func ( *LexerRegistry) ( Lexer) Lexer {
	.SetRegistry()
	 := .Config()

	.byName[.Name] = 
	.byName[strings.ToLower(.Name)] = 

	for ,  := range .Aliases {
		.byAlias[] = 
		.byAlias[strings.ToLower()] = 
	}

	.Lexers = add(.Lexers, )

	return 
}

// add adds a lexer to a slice of lexers if it doesn't already exist, or if found will replace it.
func add( Lexers,  Lexer) Lexers {
	for ,  := range  {
		if  == nil {
			continue
		}

		if .Config().Name == .Config().Name {
			[] = 
			return 
		}
	}

	return append(, )
}