diff --git a/cmd/zap/cmd/docvalue.go b/cmd/zap/cmd/docvalue.go
index 86cf427..0d5bc87 100644
--- a/cmd/zap/cmd/docvalue.go
+++ b/cmd/zap/cmd/docvalue.go
@@ -18,13 +18,11 @@ import (
 	"bytes"
 	"encoding/binary"
 	"fmt"
-	"log"
 	"math"
 	"sort"
 	"strconv"
 
 	zap "github.com/blevesearch/zapx/v16"
-	"github.com/golang/snappy"
 	"github.com/spf13/cobra"
 )
 
@@ -162,11 +160,7 @@ func dumpDocValueResults(data []byte, args []string, field string, id int, field
 		return nil
 	}
 	// uncompress the already loaded data
-	uncompressed, err := snappy.Decode(nil, curChunkData)
-	if err != nil {
-		log.Printf("snappy err %+v ", err)
-		return err
-	}
+	uncompressed := curChunkData
 
 	var termSeparator byte = 0xff
 	var termSeparatorSplitSlice = []byte{termSeparator}
diff --git a/cmd/zap/cmd/stored.go b/cmd/zap/cmd/stored.go
index 5a32d08..c723325 100644
--- a/cmd/zap/cmd/stored.go
+++ b/cmd/zap/cmd/stored.go
@@ -19,7 +19,6 @@ import (
 	"fmt"
 	"strconv"
 
-	"github.com/golang/snappy"
 	"github.com/spf13/cobra"
 )
 
@@ -63,10 +62,7 @@ var storedCmd = &cobra.Command{
 		idFieldValLen, _ := binary.Uvarint(meta)
 		fmt.Printf("Raw _id (len %d): % x\n", idFieldValLen, raw[:idFieldValLen])
 		fmt.Printf("Raw fields (len %d): % x\n", dataLen-idFieldValLen, raw[idFieldValLen:])
-		uncompressed, err := snappy.Decode(nil, raw[idFieldValLen:])
-		if err != nil {
-			panic(err)
-		}
+		uncompressed := raw[idFieldValLen:]
 		fmt.Printf("Uncompressed fields (len %d): % x\n", len(uncompressed), uncompressed)
 
 		return nil
diff --git a/contentcoder.go b/contentcoder.go
index 3343d31..f906306 100644
--- a/contentcoder.go
+++ b/contentcoder.go
@@ -19,8 +19,6 @@ import (
 	"encoding/binary"
 	"io"
 	"reflect"
-
-	"github.com/golang/snappy"
 )
 
 var reflectStaticSizeMetaData int
@@ -139,7 +137,7 @@ func (c *chunkedContentCoder) flushContents() error {
 	metaData := c.chunkMetaBuf.Bytes()
 	c.final = append(c.final, c.chunkMetaBuf.Bytes()...)
 	// write the compressed data to the final data
-	c.compressed = snappy.Encode(c.compressed[:cap(c.compressed)], c.chunkBuf.Bytes())
+	c.compressed = c.chunkBuf.Bytes()
 	c.incrementBytesWritten(uint64(len(c.compressed)))
 	c.final = append(c.final, c.compressed...)
 
diff --git a/docvalues.go b/docvalues.go
index 3d0d269..119455c 100644
--- a/docvalues.go
+++ b/docvalues.go
@@ -24,7 +24,6 @@ import (
 
 	index "github.com/blevesearch/bleve_index_api"
 	segment "github.com/blevesearch/scorch_segment_api/v2"
-	"github.com/golang/snappy"
 )
 
 var reflectStaticSizedocValueReader int
@@ -217,10 +216,7 @@ func (di *docValueReader) iterateAllDocValues(s *SegmentBase, visitor docNumTerm
 	}
 
 	// uncompress the already loaded data
-	uncompressed, err := snappy.Decode(di.uncompressed[:cap(di.uncompressed)], di.curChunkData)
-	if err != nil {
-		return err
-	}
+	uncompressed := di.curChunkData
 	di.uncompressed = uncompressed
 
 	start := uint64(0)
@@ -246,16 +242,12 @@ func (di *docValueReader) visitDocValues(docNum uint64,
 	}
 
 	var uncompressed []byte
-	var err error
 	// use the uncompressed copy if available
 	if len(di.uncompressed) > 0 {
 		uncompressed = di.uncompressed
 	} else {
 		// uncompress the already loaded data
-		uncompressed, err = snappy.Decode(di.uncompressed[:cap(di.uncompressed)], di.curChunkData)
-		if err != nil {
-			return err
-		}
+		uncompressed = di.curChunkData
 		di.uncompressed = uncompressed
 	}
 
diff --git a/merge.go b/merge.go
index 479f10b..4c60c2b 100644
--- a/merge.go
+++ b/merge.go
@@ -25,7 +25,6 @@ import (
 
 	"github.com/RoaringBitmap/roaring/v2"
 	seg "github.com/blevesearch/scorch_segment_api/v2"
-	"github.com/golang/snappy"
 )
 
 var DefaultFileMergerBufferSize = 1024 * 1024
@@ -486,7 +485,7 @@ func mergeStoredAndRemap(segments []*SegmentBase, drops []*roaring.Bitmap,
 
 			metaBytes := metaBuf.Bytes()
 
-			compressed = snappy.Encode(compressed[:cap(compressed)], data)
+			compressed = data
 
 			// record where we're about to start writing
 			docNumOffsets[newDocNum] = uint64(w.Count())
diff --git a/new.go b/new.go
index c99b933..c335fd9 100644
--- a/new.go
+++ b/new.go
@@ -24,7 +24,6 @@ import (
 
 	index "github.com/blevesearch/bleve_index_api"
 	segment "github.com/blevesearch/scorch_segment_api/v2"
-	"github.com/golang/snappy"
 )
 
 var NewSegmentBufferNumResultsBump int = 100
@@ -372,7 +371,7 @@ func (s *interim) writeStoredFields() (
 
 		metaBytes := s.metaBuf.Bytes()
 
-		compressed = snappy.Encode(compressed[:cap(compressed)], data)
+		compressed = data
 		s.incrementBytesWritten(uint64(len(compressed)))
 
 		docStoredOffsets[docNum] = uint64(s.w.Count())
diff --git a/segment.go b/segment.go
index 19aebe3..6fd0e3a 100644
--- a/segment.go
+++ b/segment.go
@@ -28,7 +28,6 @@ import (
 	mmap "github.com/blevesearch/mmap-go"
 	segment "github.com/blevesearch/scorch_segment_api/v2"
 	"github.com/blevesearch/vellum"
-	"github.com/golang/snappy"
 )
 
 var reflectStaticSizeSegmentBase int
@@ -567,10 +566,7 @@ func (s *SegmentBase) visitStoredFields(vdc *visitDocumentCtx, num uint64,
 	// handle non-"_id" fields
 	compressed = compressed[idFieldValLen:]
 
-	uncompressed, err := snappy.Decode(vdc.buf[:cap(vdc.buf)], compressed)
-	if err != nil {
-		return err
-	}
+	uncompressed := compressed
 
 	for keepGoing {
 		field, err := binary.ReadUvarint(&vdc.reader)
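
For reference, a minimal standalone sketch (not part of the patch; the chunk payload below is invented) of the snappy round-trip the removed call sites performed on stored-field and docvalue chunks, next to the raw pass-through this change leaves in place:

package main

import (
	"bytes"
	"fmt"

	"github.com/golang/snappy"
)

func main() {
	// Hypothetical chunk payload standing in for one stored-field chunk.
	chunk := []byte("stored field bytes for one chunk")

	// Old write path: snappy.Encode(dst, src) compresses src into dst,
	// growing dst if needed; dst[:cap(dst)] mirrors the buffer reuse in
	// the deleted calls and is valid even when the slice is nil.
	var compressed []byte
	compressed = snappy.Encode(compressed[:cap(compressed)], chunk)

	// Old read path: snappy.Decode returns the uncompressed bytes.
	uncompressed, err := snappy.Decode(nil, compressed)
	if err != nil {
		panic(err)
	}

	// After this patch both paths simply hand the raw bytes through.
	raw := chunk

	fmt.Println(bytes.Equal(uncompressed, chunk), bytes.Equal(raw, chunk)) // true true
}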