// Copyright 2016 The Snappy-Go Authors. All rights reserved.
// Copyright (c) 2019 Klaus Post. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package s2

import (
	"bytes"
	"encoding/binary"
	"fmt"
	"math/bits"

	"github.com/klauspost/compress/internal/le"
)

func load32( []byte,  int) uint32 {
	return le.Load32(, )
}

func load64( []byte,  int) uint64 {
	return le.Load64(, )
}

// hash6 returns the hash of the lowest 6 bytes of u to fit in a hash table with h bits.
// Preferably h should be a constant and should always be <64.
func hash6(u uint64, h uint8) uint32 {
	const prime6bytes = 227718039650203
	// Shift out the top 2 bytes, multiply by a 6-byte prime and keep the top h bits.
	return uint32(((u << (64 - 48)) * prime6bytes) >> ((64 - h) & 63))
}

func encodeGo(,  []byte) []byte {
	if  := MaxEncodedLen(len());  < 0 {
		panic(ErrTooLarge)
	} else if len() <  {
		 = make([]byte, )
	}

	// The block starts with the varint-encoded length of the decompressed bytes.
	 := binary.PutUvarint(, uint64(len()))

	if len() == 0 {
		return [:]
	}
	if len() < minNonLiteralBlockSize {
		 += emitLiteral([:], )
		return [:]
	}
	var  int
	if len() < 64<<10 {
		 = encodeBlockGo64K([:], )
	} else {
		 = encodeBlockGo([:], )
	}
	if  > 0 {
		 += 
		return [:]
	}
	// Not compressible
	 += emitLiteral([:], )
	return [:]
}

// encodeBlockGo encodes a non-empty src to a guaranteed-large-enough dst. It
// assumes that the varint-encoded length of the decompressed bytes has already
// been written.
//
// It also assumes that:
//
//	len(dst) >= MaxEncodedLen(len(src)) &&
//	minNonLiteralBlockSize <= len(src) && len(src) <= maxBlockSize
func encodeBlockGo(,  []byte) ( int) {
	// Initialize the hash table.
	const (
		    = 14
		 = 1 << 

		 = false
	)
	var  []uint32

	// sLimit is when to stop looking for offset/length copies. The inputMargin
	// lets us use a fast path for emitLiteral in the main loop, while we are
	// looking for copies.
	 := len() - inputMargin

	// Bail if we can't compress to at least this.
	 := len() - len()>>5 - 5

	// nextEmit is where in src the next emitLiteral should start from.
	 := 0

	// The encoded form must start with a literal, as there are no previous
	// bytes to copy, so we start looking for hash matches at s == 1.
	 := 1
	 := load64(, )

	// We search for a repeat at -1, but don't output repeats when nextEmit == 0
	 := 1

	for {
		 := 0
		for {
			// Next src position to check
			 :=  + (-)>>6 + 4
			if  >  {
				goto 
			}
			 := hash6(, )
			 := hash6(>>8, )
			 = int([])
			 := int([])
			[] = uint32()
			[] = uint32( + 1)
			 := hash6(>>16, )

			// Check repeat at offset checkRep.
			const  = 1
			if uint32(>>(*8)) == load32(, -+) {
				 :=  + 
				// Extend back
				for  :=  - ;  >  &&  > 0 && [-1] == [-1]; {
					--
					--
				}

				// Bail if we exceed the maximum size.
				if +(-) >  {
					return 0
				}

				 += emitLiteral([:], [:])

				// Extend forward
				 :=  -  + 4 + 
				 += 4 + 
				for  <=  {
					if  := load64(, ) ^ load64(, );  != 0 {
						 += bits.TrailingZeros64() >> 3
						break
					}
					 += 8
					 += 8
				}
				if  {
					// Validate match.
					if  <=  {
						panic("s <= candidate")
					}
					 := [:]
					 := [- : -+(-)]
					if !bytes.Equal(, ) {
						panic("mismatch")
					}
				}
				if  > 0 {
					// same as `add := emitCopy(dst[d:], repeat, s-base)` but skips storing offset.
					 += emitRepeat([:], , -)
				} else {
					// First match, cannot be repeat.
					 += emitCopy([:], , -)
				}
				 = 
				if  >=  {
					goto 
				}
				 = load64(, )
				continue
			}

			if uint32() == load32(, ) {
				break
			}
			 = int([])
			if uint32(>>8) == load32(, ) {
				[] = uint32( + 2)
				 = 
				++
				break
			}
			[] = uint32( + 2)
			if uint32(>>16) == load32(, ) {
				 += 2
				break
			}

			 = load64(, )
			 = 
		}

		// Extend backwards.
		// The top bytes will be rechecked to get the full match.
		for  > 0 &&  >  && [-1] == [-1] {
			--
			--
		}

		// Bail if we exceed the maximum size.
		if +(-) >  {
			return 0
		}

		// A 4-byte match has been found. We'll later see if more than 4 bytes
		// match. But, prior to the match, src[nextEmit:s] are unmatched. Emit
		// them as literal bytes.

		 += emitLiteral([:], [:])

		// Call emitCopy, and then see if another emitCopy could be our next
		// move. Repeat until we find no match for the input immediately after
		// what was consumed by the last emitCopy call.
		//
		// If we exit this loop normally then we need to call emitLiteral next,
		// though we don't yet know how big the literal will be. We handle that
		// by proceeding to the next iteration of the main loop. We also can
		// exit this loop via goto if we get close to exhausting the input.
		for {
			// Invariant: we have a 4-byte match at s, and no need to emit any
			// literal bytes prior to s.
			 := 
			 =  - 

			// Extend the 4-byte match as long as possible.
			 += 4
			 += 4
			for  <= len()-8 {
				if  := load64(, ) ^ load64(, );  != 0 {
					 += bits.TrailingZeros64() >> 3
					break
				}
				 += 8
				 += 8
			}

			 += emitCopy([:], , -)
			if  {
				// Validate match.
				if  <=  {
					panic("s <= candidate")
				}
				 := [:]
				 := [- : -+(-)]
				if !bytes.Equal(, ) {
					panic("mismatch")
				}
			}

			 = 
			if  >=  {
				goto 
			}

			if  >  {
				// Do we have space for more, if not bail.
				return 0
			}
			// Check for an immediate match, otherwise start search at s+1
			 := load64(, -2)
			 := hash6(, )
			 := hash6(>>16, )
			 = int([])
			[] = uint32( - 2)
			[] = uint32()
			if  &&  ==  {
				panic("s == candidate")
			}
			if uint32(>>16) != load32(, ) {
				 = load64(, +1)
				++
				break
			}
		}
	}

:
	if  < len() {
		// Bail if we exceed the maximum size.
		if +len()- >  {
			return 0
		}
		 += emitLiteral([:], [:])
	}
	return 
}

// encodeBlockGo64K is a specialized version for compressing blocks <= 64KB
func encodeBlockGo64K(,  []byte) ( int) {
	// Initialize the hash table.
	const (
		    = 14
		 = 1 << 

		 = false
	)

	var  []uint16

	// sLimit is when to stop looking for offset/length copies. The inputMargin
	// lets us use a fast path for emitLiteral in the main loop, while we are
	// looking for copies.
	 := len() - inputMargin

	// Bail if we can't compress to at least this.
	 := len() - len()>>5 - 5

	// nextEmit is where in src the next emitLiteral should start from.
	 := 0

	// The encoded form must start with a literal, as there are no previous
	// bytes to copy, so we start looking for hash matches at s == 1.
	 := 1
	 := load64(, )

	// We search for a repeat at -1, but don't output repeats when nextEmit == 0
	 := 1

	for {
		 := 0
		for {
			// Next src position to check
			 :=  + (-)>>5 + 4
			if  >  {
				goto 
			}
			 := hash6(, )
			 := hash6(>>8, )
			 = int([])
			 := int([])
			[] = uint16()
			[] = uint16( + 1)
			 := hash6(>>16, )

			// Check repeat at offset checkRep.
			const  = 1
			if uint32(>>(*8)) == load32(, -+) {
				 :=  + 
				// Extend back
				for  :=  - ;  >  &&  > 0 && [-1] == [-1]; {
					--
					--
				}

				// Bail if we exceed the maximum size.
				if +(-) >  {
					return 0
				}

				 += emitLiteral([:], [:])

				// Extend forward
				 :=  -  + 4 + 
				 += 4 + 
				for  <=  {
					if  := load64(, ) ^ load64(, );  != 0 {
						 += bits.TrailingZeros64() >> 3
						break
					}
					 += 8
					 += 8
				}
				if  {
					// Validate match.
					if  <=  {
						panic("s <= candidate")
					}
					 := [:]
					 := [- : -+(-)]
					if !bytes.Equal(, ) {
						panic("mismatch")
					}
				}
				if  > 0 {
					// same as `add := emitCopy(dst[d:], repeat, s-base)` but skips storing offset.
					 += emitRepeat([:], , -)
				} else {
					// First match, cannot be repeat.
					 += emitCopy([:], , -)
				}
				 = 
				if  >=  {
					goto 
				}
				 = load64(, )
				continue
			}

			if uint32() == load32(, ) {
				break
			}
			 = int([])
			if uint32(>>8) == load32(, ) {
				[] = uint16( + 2)
				 = 
				++
				break
			}
			[] = uint16( + 2)
			if uint32(>>16) == load32(, ) {
				 += 2
				break
			}

			 = load64(, )
			 = 
		}

		// Extend backwards.
		// The top bytes will be rechecked to get the full match.
		for  > 0 &&  >  && [-1] == [-1] {
			--
			--
		}

		// Bail if we exceed the maximum size.
		if +(-) >  {
			return 0
		}

		// A 4-byte match has been found. We'll later see if more than 4 bytes
		// match. But, prior to the match, src[nextEmit:s] are unmatched. Emit
		// them as literal bytes.

		 += emitLiteral([:], [:])

		// Call emitCopy, and then see if another emitCopy could be our next
		// move. Repeat until we find no match for the input immediately after
		// what was consumed by the last emitCopy call.
		//
		// If we exit this loop normally then we need to call emitLiteral next,
		// though we don't yet know how big the literal will be. We handle that
		// by proceeding to the next iteration of the main loop. We also can
		// exit this loop via goto if we get close to exhausting the input.
		for {
			// Invariant: we have a 4-byte match at s, and no need to emit any
			// literal bytes prior to s.
			 := 
			 =  - 

			// Extend the 4-byte match as long as possible.
			 += 4
			 += 4
			for  <= len()-8 {
				if  := load64(, ) ^ load64(, );  != 0 {
					 += bits.TrailingZeros64() >> 3
					break
				}
				 += 8
				 += 8
			}

			 += emitCopy([:], , -)
			if  {
				// Validate match.
				if  <=  {
					panic("s <= candidate")
				}
				 := [:]
				 := [- : -+(-)]
				if !bytes.Equal(, ) {
					panic("mismatch")
				}
			}

			 = 
			if  >=  {
				goto 
			}

			if  >  {
				// Do we have space for more, if not bail.
				return 0
			}
			// Check for an immediate match, otherwise start search at s+1
			 := load64(, -2)
			 := hash6(, )
			 := hash6(>>16, )
			 = int([])
			[] = uint16( - 2)
			[] = uint16()
			if  &&  ==  {
				panic("s == candidate")
			}
			if uint32(>>16) != load32(, ) {
				 = load64(, +1)
				++
				break
			}
		}
	}

:
	if  < len() {
		// Bail if we exceed the maximum size.
		if +len()- >  {
			return 0
		}
		 += emitLiteral([:], [:])
	}
	return 
}

func encodeBlockSnappyGo(,  []byte) ( int) {
	// Initialize the hash table.
	const (
		    = 14
		 = 1 << 
	)
	var  []uint32

	// sLimit is when to stop looking for offset/length copies. The inputMargin
	// lets us use a fast path for emitLiteral in the main loop, while we are
	// looking for copies.
	 := len() - inputMargin

	// Bail if we can't compress to at least this.
	 := len() - len()>>5 - 5

	// nextEmit is where in src the next emitLiteral should start from.
	 := 0

	// The encoded form must start with a literal, as there are no previous
	// bytes to copy, so we start looking for hash matches at s == 1.
	 := 1
	 := load64(, )

	// We search for a repeat at -1, but don't output repeats when nextEmit == 0
	 := 1

	for {
		 := 0
		for {
			// Next src position to check
			 :=  + (-)>>6 + 4
			if  >  {
				goto 
			}
			 := hash6(, )
			 := hash6(>>8, )
			 = int([])
			 := int([])
			[] = uint32()
			[] = uint32( + 1)
			 := hash6(>>16, )

			// Check repeat at offset checkRep.
			const  = 1
			if uint32(>>(*8)) == load32(, -+) {
				 :=  + 
				// Extend back
				for  :=  - ;  >  &&  > 0 && [-1] == [-1]; {
					--
					--
				}
				// Bail if we exceed the maximum size.
				if +(-) >  {
					return 0
				}

				 += emitLiteral([:], [:])

				// Extend forward
				 :=  -  + 4 + 
				 += 4 + 
				for  <=  {
					if  := load64(, ) ^ load64(, );  != 0 {
						 += bits.TrailingZeros64() >> 3
						break
					}
					 += 8
					 += 8
				}

				 += emitCopyNoRepeat([:], , -)
				 = 
				if  >=  {
					goto 
				}

				 = load64(, )
				continue
			}

			if uint32() == load32(, ) {
				break
			}
			 = int([])
			if uint32(>>8) == load32(, ) {
				[] = uint32( + 2)
				 = 
				++
				break
			}
			[] = uint32( + 2)
			if uint32(>>16) == load32(, ) {
				 += 2
				break
			}

			 = load64(, )
			 = 
		}

		// Extend backwards
		for  > 0 &&  >  && [-1] == [-1] {
			--
			--
		}

		// Bail if we exceed the maximum size.
		if +(-) >  {
			return 0
		}

		// A 4-byte match has been found. We'll later see if more than 4 bytes
		// match. But, prior to the match, src[nextEmit:s] are unmatched. Emit
		// them as literal bytes.

		 += emitLiteral([:], [:])

		// Call emitCopy, and then see if another emitCopy could be our next
		// move. Repeat until we find no match for the input immediately after
		// what was consumed by the last emitCopy call.
		//
		// If we exit this loop normally then we need to call emitLiteral next,
		// though we don't yet know how big the literal will be. We handle that
		// by proceeding to the next iteration of the main loop. We also can
		// exit this loop via goto if we get close to exhausting the input.
		for {
			// Invariant: we have a 4-byte match at s, and no need to emit any
			// literal bytes prior to s.
			 := 
			 =  - 

			// Extend the 4-byte match as long as possible.
			 += 4
			 += 4
			for  <= len()-8 {
				if  := load64(, ) ^ load64(, );  != 0 {
					 += bits.TrailingZeros64() >> 3
					break
				}
				 += 8
				 += 8
			}

			 += emitCopyNoRepeat([:], , -)
			if false {
				// Validate match.
				 := [:]
				 := [- : -+(-)]
				if !bytes.Equal(, ) {
					panic("mismatch")
				}
			}

			 = 
			if  >=  {
				goto 
			}

			if  >  {
				// Do we have space for more, if not bail.
				return 0
			}
			// Check for an immediate match, otherwise start search at s+1
			 := load64(, -2)
			 := hash6(, )
			 := hash6(>>16, )
			 = int([])
			[] = uint32( - 2)
			[] = uint32()
			if uint32(>>16) != load32(, ) {
				 = load64(, +1)
				++
				break
			}
		}
	}

:
	if  < len() {
		// Bail if we exceed the maximum size.
		if +len()- >  {
			return 0
		}
		 += emitLiteral([:], [:])
	}
	return 
}

// encodeBlockSnappyGo64K is a special version of encodeBlockSnappyGo for sizes <64KB
func encodeBlockSnappyGo64K(,  []byte) ( int) {
	// Initialize the hash table.
	const (
		    = 14
		 = 1 << 
	)

	var  []uint16

	// sLimit is when to stop looking for offset/length copies. The inputMargin
	// lets us use a fast path for emitLiteral in the main loop, while we are
	// looking for copies.
	 := len() - inputMargin

	// Bail if we can't compress to at least this.
	 := len() - len()>>5 - 5

	// nextEmit is where in src the next emitLiteral should start from.
	 := 0

	// The encoded form must start with a literal, as there are no previous
	// bytes to copy, so we start looking for hash matches at s == 1.
	 := 1
	 := load64(, )

	// We search for a repeat at -1, but don't output repeats when nextEmit == 0
	 := 1

	for {
		 := 0
		for {
			// Next src position to check
			 :=  + (-)>>5 + 4
			if  >  {
				goto 
			}
			 := hash6(, )
			 := hash6(>>8, )
			 = int([])
			 := int([])
			[] = uint16()
			[] = uint16( + 1)
			 := hash6(>>16, )

			// Check repeat at offset checkRep.
			const  = 1
			if uint32(>>(*8)) == load32(, -+) {
				 :=  + 
				// Extend back
				for  :=  - ;  >  &&  > 0 && [-1] == [-1]; {
					--
					--
				}
				// Bail if we exceed the maximum size.
				if +(-) >  {
					return 0
				}

				 += emitLiteral([:], [:])

				// Extend forward
				 :=  -  + 4 + 
				 += 4 + 
				for  <=  {
					if  := load64(, ) ^ load64(, );  != 0 {
						 += bits.TrailingZeros64() >> 3
						break
					}
					 += 8
					 += 8
				}

				 += emitCopyNoRepeat([:], , -)
				 = 
				if  >=  {
					goto 
				}

				 = load64(, )
				continue
			}

			if uint32() == load32(, ) {
				break
			}
			 = int([])
			if uint32(>>8) == load32(, ) {
				[] = uint16( + 2)
				 = 
				++
				break
			}
			[] = uint16( + 2)
			if uint32(>>16) == load32(, ) {
				 += 2
				break
			}

			 = load64(, )
			 = 
		}

		// Extend backwards
		for  > 0 &&  >  && [-1] == [-1] {
			--
			--
		}

		// Bail if we exceed the maximum size.
		if +(-) >  {
			return 0
		}

		// A 4-byte match has been found. We'll later see if more than 4 bytes
		// match. But, prior to the match, src[nextEmit:s] are unmatched. Emit
		// them as literal bytes.

		 += emitLiteral([:], [:])

		// Call emitCopy, and then see if another emitCopy could be our next
		// move. Repeat until we find no match for the input immediately after
		// what was consumed by the last emitCopy call.
		//
		// If we exit this loop normally then we need to call emitLiteral next,
		// though we don't yet know how big the literal will be. We handle that
		// by proceeding to the next iteration of the main loop. We also can
		// exit this loop via goto if we get close to exhausting the input.
		for {
			// Invariant: we have a 4-byte match at s, and no need to emit any
			// literal bytes prior to s.
			 := 
			 =  - 

			// Extend the 4-byte match as long as possible.
			 += 4
			 += 4
			for  <= len()-8 {
				if  := load64(, ) ^ load64(, );  != 0 {
					 += bits.TrailingZeros64() >> 3
					break
				}
				 += 8
				 += 8
			}

			 += emitCopyNoRepeat([:], , -)
			if false {
				// Validate match.
				 := [:]
				 := [- : -+(-)]
				if !bytes.Equal(, ) {
					panic("mismatch")
				}
			}

			 = 
			if  >=  {
				goto 
			}

			if  >  {
				// Do we have space for more, if not bail.
				return 0
			}
			// Check for an immediate match, otherwise start search at s+1
			 := load64(, -2)
			 := hash6(, )
			 := hash6(>>16, )
			 = int([])
			[] = uint16( - 2)
			[] = uint16()
			if uint32(>>16) != load32(, ) {
				 = load64(, +1)
				++
				break
			}
		}
	}

:
	if  < len() {
		// Bail if we exceed the maximum size.
		if +len()- >  {
			return 0
		}
		 += emitLiteral([:], [:])
	}
	return 
}

// encodeBlockGo encodes a non-empty src to a guaranteed-large-enough dst. It
// assumes that the varint-encoded length of the decompressed bytes has already
// been written.
//
// It also assumes that:
//
//	len(dst) >= MaxEncodedLen(len(src)) &&
//	minNonLiteralBlockSize <= len(src) && len(src) <= maxBlockSize
func encodeBlockDictGo(,  []byte,  *Dict) ( int) {
	// Initialize the hash table.
	const (
		    = 14
		 = 1 << 
		     = 8 // maximum bytes ahead without checking sLimit

		 = false
	)
	.initFast()

	var  []uint32

	// sLimit is when to stop looking for offset/length copies. The inputMargin
	// lets us use a fast path for emitLiteral in the main loop, while we are
	// looking for copies.
	 := len() - inputMargin
	if  > MaxDictSrcOffset- {
		 = MaxDictSrcOffset - 
	}

	// Bail if we can't compress to at least this.
	 := len() - len()>>5 - 5

	// nextEmit is where in src the next emitLiteral should start from.
	 := 0

	// The encoded form can start with a dict entry (copy or repeat).
	 := 0

	// Convert dict repeat to offset
	 := len(.dict) - .repeat
	 := load64(, 0)

	// While in dict
:
	for {
		// Next src position to check
		 :=  + (-)>>6 + 4
		 := hash6(, )
		 := hash6(>>8, )
		if  >  {
			if  {
				fmt.Println("slimit reached", , )
			}
			break 
		}
		 := int(.fastTable[])
		 := int(.fastTable[])
		 := int([])
		 := int([])
		[] = uint32()
		[] = uint32( + 1)
		 := hash6(>>16, )

		// Check repeat at offset checkRep.
		const  = 1

		if  >  {
			 := len(.dict) -  + 
			if - >= 4 && uint32() == load32(.dict, ) {
				// Extend back
				 := 
				for  := ;  >  &&  > 0 && .dict[-1] == [-1]; {
					--
					--
				}
				// Bail if we exceed the maximum size.
				if +(-) >  {
					return 0
				}

				 += emitLiteral([:], [:])
				if  &&  !=  {
					fmt.Println("emitted ", -, "literals")
				}
				 += 4
				 += 4
				for  < len(.dict)-8 &&  <= len()-8 {
					if  := load64(, ) ^ load64(.dict, );  != 0 {
						 += bits.TrailingZeros64() >> 3
						break
					}
					 += 8
					 += 8
				}
				 += emitRepeat([:], , -)
				if  {
					fmt.Println("emitted dict repeat length", -, "offset:", , "s:", )
				}
				 = 
				if  >=  {
					break 
				}
				 = load64(, )
				continue
			}
		} else if uint32(>>(*8)) == load32(, -+) {
			 :=  + 
			// Extend back
			for  :=  - ;  >  &&  > 0 && [-1] == [-1]; {
				--
				--
			}
			 += emitLiteral([:], [:])
			if  &&  !=  {
				fmt.Println("emitted ", -, "literals")
			}

			// Extend forward
			 :=  -  + 4 + 
			 += 4 + 
			for  <=  {
				if  := load64(, ) ^ load64(, );  != 0 {
					 += bits.TrailingZeros64() >> 3
					break
				}
				 += 8
				 += 8
			}
			if  {
				// Validate match.
				if  <=  {
					panic("s <= candidate")
				}
				 := [:]
				 := [- : -+(-)]
				if !bytes.Equal(, ) {
					panic("mismatch")
				}
			}

			if  > 0 {
				// same as `add := emitCopy(dst[d:], repeat, s-base)` but skips storing offset.
				 += emitRepeat([:], , -)
			} else {
				// First match, cannot be repeat.
				 += emitCopy([:], , -)
			}

			 = 
			if  >=  {
				break 
			}
			if  {
				fmt.Println("emitted reg repeat", -, "s:", )
			}
			 = load64(, )
			continue 
		}
		if  == 0 {
			 = load64(, )
			 = 
			continue 
		}
		// Start with table. These matches will always be closer.
		if uint32() == load32(, ) {
			goto 
		}
		 = int([])
		if uint32(>>8) == load32(, ) {
			[] = uint32( + 2)
			 = 
			++
			goto 
		}

		// Check dict. Dicts have longer offsets, so we want longer matches.
		if  == load64(.dict, ) {
			[] = uint32( + 2)
			goto 
		}

		 = int(.fastTable[])
		// Check if upper 7 bytes match
		if  >= 1 {
			if ^load64(.dict, -1) < (1 << 8) {
				[] = uint32( + 2)
				 = 
				++
				goto 
			}
		}

		[] = uint32( + 2)
		if uint32(>>16) == load32(, ) {
			 += 2
			goto 
		}
		if  >= 2 {
			// Check if upper 6 bytes match
			if ^load64(.dict, -2) < (1 << 16) {
				 += 2
				goto 
			}
		}

		 = load64(, )
		 = 
		continue 

	:
		{
			if  {
				if load32(.dict, ) != load32(, ) {
					panic("dict emit mismatch")
				}
			}
			// Extend backwards.
			// The top bytes will be rechecked to get the full match.
			for  > 0 &&  >  && .dict[-1] == [-1] {
				--
				--
			}

			// Bail if we exceed the maximum size.
			if +(-) >  {
				return 0
			}

			// A 4-byte match has been found. We'll later see if more than 4 bytes
			// match. But, prior to the match, src[nextEmit:s] are unmatched. Emit
			// them as literal bytes.

			 += emitLiteral([:], [:])
			if  &&  !=  {
				fmt.Println("emitted ", -, "literals")
			}
			{
				// Invariant: we have a 4-byte match at s, and no need to emit any
				// literal bytes prior to s.
				 := 
				 =  + (len(.dict)) - 

				// Extend the 4-byte match as long as possible.
				 += 4
				 += 4
				for  <= len()-8 && len(.dict)- >= 8 {
					if  := load64(, ) ^ load64(.dict, );  != 0 {
						 += bits.TrailingZeros64() >> 3
						break
					}
					 += 8
					 += 8
				}

				// Matches longer than 64 are split.
				if  <=  || - < 8 {
					 += emitCopy([:], , -)
				} else {
					// Split to ensure we don't start a copy within next block
					 += emitCopy([:], , 4)
					 += emitRepeat([:], , --4)
				}
				if false {
					// Validate match.
					if  <=  {
						panic("s <= candidate")
					}
					 := [:]
					 := .dict[- : -+(-)]
					if !bytes.Equal(, ) {
						panic("mismatch")
					}
				}
				if  {
					fmt.Println("emitted dict copy, length", -, "offset:", , "s:", )
				}
				 = 
				if  >=  {
					break 
				}

				if  >  {
					// Do we have space for more, if not bail.
					return 0
				}

				// Index and continue loop to try new candidate.
				 := load64(, -2)
				 := hash6(, )
				 := hash6(>>8, )
				[] = uint32( - 2)
				[] = uint32( - 1)
				 = load64(, )
			}
			continue
		}
	:

		// Extend backwards.
		// The top bytes will be rechecked to get the full match.
		for  > 0 &&  >  && [-1] == [-1] {
			--
			--
		}

		// Bail if we exceed the maximum size.
		if +(-) >  {
			return 0
		}

		// A 4-byte match has been found. We'll later see if more than 4 bytes
		// match. But, prior to the match, src[nextEmit:s] are unmatched. Emit
		// them as literal bytes.

		 += emitLiteral([:], [:])
		if  &&  !=  {
			fmt.Println("emitted ", -, "literals")
		}
		// Call emitCopy, and then see if another emitCopy could be our next
		// move. Repeat until we find no match for the input immediately after
		// what was consumed by the last emitCopy call.
		//
		// If we exit this loop normally then we need to call emitLiteral next,
		// though we don't yet know how big the literal will be. We handle that
		// by proceeding to the next iteration of the main loop. We also can
		// exit this loop via goto if we get close to exhausting the input.
		for {
			// Invariant: we have a 4-byte match at s, and no need to emit any
			// literal bytes prior to s.
			 := 
			 =  - 

			// Extend the 4-byte match as long as possible.
			 += 4
			 += 4
			for  <= len()-8 {
				if  := load64(, ) ^ load64(, );  != 0 {
					 += bits.TrailingZeros64() >> 3
					break
				}
				 += 8
				 += 8
			}

			 += emitCopy([:], , -)
			if  {
				// Validate match.
				if  <=  {
					panic("s <= candidate")
				}
				 := [:]
				 := [- : -+(-)]
				if !bytes.Equal(, ) {
					panic("mismatch")
				}
			}
			if  {
				fmt.Println("emitted src copy, length", -, "offset:", , "s:", )
			}
			 = 
			if  >=  {
				break 
			}

			if  >  {
				// Do we have space for more, if not bail.
				return 0
			}
			// Check for an immediate match, otherwise start search at s+1
			 := load64(, -2)
			 := hash6(, )
			 := hash6(>>16, )
			 = int([])
			[] = uint32( - 2)
			[] = uint32()
			if  &&  ==  {
				panic("s == candidate")
			}
			if uint32(>>16) != load32(, ) {
				 = load64(, +1)
				++
				break
			}
		}
	}

	// Search without dict:
	if  >  {
		 = 0
	}

	// No more dict
	 = len() - inputMargin
	if  >=  {
		goto 
	}
	if  {
		fmt.Println("non-dict matching at", , "repeat:", )
	}
	 = load64(, )
	if  {
		fmt.Println("now", , "->", , "out:", , "left:", len()-, "nextemit:", , "dstLimit:", , "s:", )
	}
	for {
		 := 0
		for {
			// Next src position to check
			 :=  + (-)>>6 + 4
			if  >  {
				goto 
			}
			 := hash6(, )
			 := hash6(>>8, )
			 = int([])
			 := int([])
			[] = uint32()
			[] = uint32( + 1)
			 := hash6(>>16, )

			// Check repeat at offset checkRep.
			const  = 1
			if  > 0 && uint32(>>(*8)) == load32(, -+) {
				 :=  + 
				// Extend back
				for  :=  - ;  >  &&  > 0 && [-1] == [-1]; {
					--
					--
				}
				// Bail if we exceed the maximum size.
				if +(-) >  {
					return 0
				}

				 += emitLiteral([:], [:])
				if  &&  !=  {
					fmt.Println("emitted ", -, "literals")
				}
				// Extend forward
				 :=  -  + 4 + 
				 += 4 + 
				for  <=  {
					if  := load64(, ) ^ load64(, );  != 0 {
						 += bits.TrailingZeros64() >> 3
						break
					}
					 += 8
					 += 8
				}
				if  {
					// Validate match.
					if  <=  {
						panic("s <= candidate")
					}
					 := [:]
					 := [- : -+(-)]
					if !bytes.Equal(, ) {
						panic("mismatch")
					}
				}
				if  > 0 {
					// same as `add := emitCopy(dst[d:], repeat, s-base)` but skips storing offset.
					 += emitRepeat([:], , -)
				} else {
					// First match, cannot be repeat.
					 += emitCopy([:], , -)
				}
				if  {
					fmt.Println("emitted src repeat length", -, "offset:", , "s:", )
				}
				 = 
				if  >=  {
					goto 
				}

				 = load64(, )
				continue
			}

			if uint32() == load32(, ) {
				break
			}
			 = int([])
			if uint32(>>8) == load32(, ) {
				[] = uint32( + 2)
				 = 
				++
				break
			}
			[] = uint32( + 2)
			if uint32(>>16) == load32(, ) {
				 += 2
				break
			}

			 = load64(, )
			 = 
		}

		// Extend backwards.
		// The top bytes will be rechecked to get the full match.
		for  > 0 &&  >  && [-1] == [-1] {
			--
			--
		}

		// Bail if we exceed the maximum size.
		if +(-) >  {
			return 0
		}

		// A 4-byte match has been found. We'll later see if more than 4 bytes
		// match. But, prior to the match, src[nextEmit:s] are unmatched. Emit
		// them as literal bytes.

		 += emitLiteral([:], [:])
		if  &&  !=  {
			fmt.Println("emitted ", -, "literals")
		}
		// Call emitCopy, and then see if another emitCopy could be our next
		// move. Repeat until we find no match for the input immediately after
		// what was consumed by the last emitCopy call.
		//
		// If we exit this loop normally then we need to call emitLiteral next,
		// though we don't yet know how big the literal will be. We handle that
		// by proceeding to the next iteration of the main loop. We also can
		// exit this loop via goto if we get close to exhausting the input.
		for {
			// Invariant: we have a 4-byte match at s, and no need to emit any
			// literal bytes prior to s.
			 := 
			 =  - 

			// Extend the 4-byte match as long as possible.
			 += 4
			 += 4
			for  <= len()-8 {
				if  := load64(, ) ^ load64(, );  != 0 {
					 += bits.TrailingZeros64() >> 3
					break
				}
				 += 8
				 += 8
			}

			 += emitCopy([:], , -)
			if  {
				// Validate match.
				if  <=  {
					panic("s <= candidate")
				}
				 := [:]
				 := [- : -+(-)]
				if !bytes.Equal(, ) {
					panic("mismatch")
				}
			}
			if  {
				fmt.Println("emitted src copy, length", -, "offset:", , "s:", )
			}
			 = 
			if  >=  {
				goto 
			}

			if  >  {
				// Do we have space for more, if not bail.
				return 0
			}
			// Check for an immediate match, otherwise start search at s+1
			 := load64(, -2)
			 := hash6(, )
			 := hash6(>>16, )
			 = int([])
			[] = uint32( - 2)
			[] = uint32()
			if  &&  ==  {
				panic("s == candidate")
			}
			if uint32(>>16) != load32(, ) {
				 = load64(, +1)
				++
				break
			}
		}
	}

:
	if  < len() {
		// Bail if we exceed the maximum size.
		if +len()- >  {
			return 0
		}
		 += emitLiteral([:], [:])
		if  &&  !=  {
			fmt.Println("emitted ", len()-, "literals")
		}
	}
	return 
}