lib/encoding/zstd: disable CRC checks in pure Go build

This should give slightly better compression and decompression performance.
Additionally, this shaves off 4 bytes per compressed block.
Aliaksandr Valialkin 2019-07-24 19:15:33 +03:00
parent 54f035d4ce
commit 97b5dc7122
2 changed files with 19 additions and 8 deletions
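
For context: the zstd frame format defines an optional 4-byte content checksum at the end of each frame, and the pure Go encoder from github.com/klauspost/compress/zstd writes it by default; WithEncoderCRC(false) omits it. The sketch below is illustrative only (not code from this repository) and shows the option being toggled, plus that decompression needs no matching change because the checksum is simply absent from the frame:

package main

import (
    "fmt"

    "github.com/klauspost/compress/zstd"
)

func main() {
    data := []byte("some compressible payload, repeated repeated repeated")

    // Encoder that appends the 4-byte content checksum (the library default).
    // Error handling is elided for brevity in this sketch.
    withCRC, _ := zstd.NewWriter(nil, zstd.WithEncoderCRC(true))
    // Encoder without the checksum, as enabled by this commit.
    noCRC, _ := zstd.NewWriter(nil, zstd.WithEncoderCRC(false))

    a := withCRC.EncodeAll(data, nil)
    b := noCRC.EncodeAll(data, nil)
    fmt.Println(len(a)-len(b), "bytes saved per block") // expected: 4

    // Decompression needs no special option: frames without a checksum
    // decode as usual, there is just nothing to verify.
    dec, _ := zstd.NewReader(nil)
    plain, err := dec.DecodeAll(b, nil)
    fmt.Println(string(plain), err)
}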

@@ -45,17 +45,15 @@ func CompressLevel(dst, src []byte, compressionLevel int) []byte {
 func getEncoder(compressionLevel int) *zstd.Encoder {
     r := av.Load().(registry)
-    if e, ok := r[compressionLevel]; ok {
+    e := r[compressionLevel]
+    if e != nil {
         return e
     }
-    level := zstd.EncoderLevelFromZstd(compressionLevel)
-    e, err := zstd.NewWriter(nil, zstd.WithEncoderLevel(level))
-    if err != nil {
-        logger.Panicf("BUG: failed to create ZSTD writer: %s", err)
-    }
     mu.Lock()
+    // Create the encoder under lock in order to prevent from wasted work
+    // when concurrent goroutines create encoder for the same compressionLevel.
+    e = newEncoder(compressionLevel)
     r1 := av.Load().(registry)
     r2 := make(registry)
     for k, v := range r1 {
@@ -67,3 +65,14 @@ func getEncoder(compressionLevel int) *zstd.Encoder {
     return e
 }
+
+func newEncoder(compressionLevel int) *zstd.Encoder {
+    level := zstd.EncoderLevelFromZstd(compressionLevel)
+    e, err := zstd.NewWriter(nil,
+        zstd.WithEncoderCRC(false), // Disable CRC for performance reasons.
+        zstd.WithEncoderLevel(level))
+    if err != nil {
+        logger.Panicf("BUG: failed to create ZSTD writer: %s", err)
+    }
+    return e
+}
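
The hunk header above shows this code sits next to CompressLevel; a plausible sketch of how that caller uses the cached encoder (assumed here, not part of this diff) is:

// Hypothetical sketch of the caller; the actual CompressLevel in this
// package may differ in details.
func CompressLevel(dst, src []byte, compressionLevel int) []byte {
    e := getEncoder(compressionLevel)
    // EncodeAll appends a complete zstd frame for src to dst, so callers
    // can reuse dst between calls to avoid allocations.
    return e.EncodeAll(src, dst)
}

Keeping encoders in a copy-on-write registry means readers only perform an atomic Load; the mutex is taken solely on the slow path that creates a missing encoder, as the added comment in the diff notes.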

@@ -68,7 +68,9 @@ func testCompressDecompress(t *testing.T, compress compressFn, decompress decomp
 type compressFn func(dst, src []byte, compressionLevel int) ([]byte, error)
 
 func pureCompress(dst, src []byte, _ int) ([]byte, error) {
-    w, err := pure.NewWriter(nil, pure.WithEncoderLevel(pure.SpeedBestCompression))
+    w, err := pure.NewWriter(nil,
+        pure.WithEncoderCRC(false), // Disable CRC for performance reasons.
+        pure.WithEncoderLevel(pure.SpeedBestCompression))
     if err != nil {
         return nil, err
     }
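
The matching decompression helper is not part of this hunk; assuming pure aliases github.com/klauspost/compress/zstd, a sketch of such a helper (hypothetical, not the repository's code) could look like:

// pureDecompressSketch is a hypothetical counterpart to pureCompress above.
func pureDecompressSketch(dst, src []byte) ([]byte, error) {
    // No CRC-related option is needed on the read side: a frame written
    // with WithEncoderCRC(false) simply carries no checksum to verify.
    r, err := pure.NewReader(nil)
    if err != nil {
        return nil, err
    }
    defer r.Close()
    return r.DecodeAll(src, dst)
}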