8 changes: 1 addition & 7 deletions cmd/zap/cmd/docvalue.go
@@ -18,13 +18,11 @@ import (
 	"bytes"
 	"encoding/binary"
 	"fmt"
-	"log"
 	"math"
 	"sort"
 	"strconv"
 
 	zap "github.com/blevesearch/zapx/v16"
-	"github.com/golang/snappy"
 	"github.com/spf13/cobra"
 )
 
@@ -162,11 +160,7 @@ func dumpDocValueResults(data []byte, args []string, field string, id int, field
 		return nil
 	}
 	// uncompress the already loaded data
-	uncompressed, err := snappy.Decode(nil, curChunkData)
-	if err != nil {
-		log.Printf("snappy err %+v ", err)
-		return err
-	}
+	uncompressed := curChunkData
 
 	var termSeparator byte = 0xff
 	var termSeparatorSplitSlice = []byte{termSeparator}
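Note on the hunk above: the docvalue command previously snappy-decoded each loaded chunk before splitting it on the 0xff term separator; it now uses the loaded bytes as-is. A minimal, self-contained sketch (not part of this PR, names illustrative) of the old round-trip next to the new pass-through:

package main

import (
	"fmt"

	"github.com/golang/snappy"
)

func main() {
	// a toy doc-value chunk: terms joined by the 0xff separator
	chunk := []byte("term1\xffterm2\xff")

	// old read path: chunks arrived snappy-encoded and had to be decoded
	// before splitting on the separator
	compressed := snappy.Encode(nil, chunk)
	uncompressed, err := snappy.Decode(nil, compressed)
	if err != nil {
		panic(err)
	}
	fmt.Printf("decoded:  %q\n", uncompressed)

	// new read path: the loaded bytes already are the raw chunk
	fmt.Printf("passthru: %q\n", chunk)
}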
6 changes: 1 addition & 5 deletions cmd/zap/cmd/stored.go
@@ -19,7 +19,6 @@ import (
 	"fmt"
 	"strconv"
 
-	"github.com/golang/snappy"
 	"github.com/spf13/cobra"
 )
 
@@ -63,10 +62,7 @@ var storedCmd = &cobra.Command{
 			idFieldValLen, _ := binary.Uvarint(meta)
 			fmt.Printf("Raw _id (len %d): % x\n", idFieldValLen, raw[:idFieldValLen])
 			fmt.Printf("Raw fields (len %d): % x\n", dataLen-idFieldValLen, raw[idFieldValLen:])
-			uncompressed, err := snappy.Decode(nil, raw[idFieldValLen:])
-			if err != nil {
-				panic(err)
-			}
+			uncompressed := raw[idFieldValLen:]
 			fmt.Printf("Uncompressed fields (len %d): % x\n", len(uncompressed), uncompressed)
 
 			return nil
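For context, the stored command decodes a record framed as a uvarint _id length followed by the raw _id bytes and then the field data; after this change the trailing field bytes are printed without a snappy.Decode step. A runnable sketch of that framing with toy data (the record layout here is inferred from the context lines, not copied from zap):

package main

import (
	"encoding/binary"
	"fmt"
)

func main() {
	// toy stored record: uvarint(_id length) in meta, then _id bytes + field bytes
	id := []byte("doc-1")
	fields := []byte{0x0a, 0x0b, 0x0c}

	meta := binary.AppendUvarint(nil, uint64(len(id)))
	raw := append(append([]byte{}, id...), fields...)

	idFieldValLen, _ := binary.Uvarint(meta)
	fmt.Printf("Raw _id (len %d): % x\n", idFieldValLen, raw[:idFieldValLen])

	// after this PR the remaining bytes are treated as already uncompressed
	uncompressed := raw[idFieldValLen:]
	fmt.Printf("Uncompressed fields (len %d): % x\n", len(uncompressed), uncompressed)
}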
4 changes: 1 addition & 3 deletions contentcoder.go
@@ -19,8 +19,6 @@ import (
 	"encoding/binary"
 	"io"
 	"reflect"
-
-	"github.com/golang/snappy"
 )
 
 var reflectStaticSizeMetaData int
@@ -139,7 +137,7 @@ func (c *chunkedContentCoder) flushContents() error {
 	metaData := c.chunkMetaBuf.Bytes()
 	c.final = append(c.final, c.chunkMetaBuf.Bytes()...)
 	// write the compressed data to the final data
-	c.compressed = snappy.Encode(c.compressed[:cap(c.compressed)], c.chunkBuf.Bytes())
+	c.compressed = c.chunkBuf.Bytes()
 	c.incrementBytesWritten(uint64(len(c.compressed)))
 	c.final = append(c.final, c.compressed...)
 
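The removed snappy.Encode call used the dst-reuse idiom: passing c.compressed[:cap(c.compressed)] lets Encode return a sub-slice of the existing allocation when it is large enough, so steady-state flushes avoid allocating. A standalone sketch of that idiom next to the new pass-through (buffer and payload are illustrative):

package main

import (
	"fmt"

	"github.com/golang/snappy"
)

func main() {
	payload := []byte("chunk contents to be flushed")

	// old: encode into a reusable buffer; Encode writes into dst when
	// cap(dst) suffices and allocates only on overflow
	buf := make([]byte, 0, 64)
	buf = snappy.Encode(buf[:cap(buf)], payload)
	fmt.Printf("compressed: %d bytes\n", len(buf))

	// new: no encoding step; the raw chunk bytes go to the output directly
	buf = payload
	fmt.Printf("raw:        %d bytes\n", len(buf))
}

One observable consequence, visible in the unchanged incrementBytesWritten line, is that the recorded byte count is now the raw chunk length rather than the compressed length.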
12 changes: 2 additions & 10 deletions docvalues.go
@@ -24,7 +24,6 @@ import (
 
 	index "github.com/blevesearch/bleve_index_api"
 	segment "github.com/blevesearch/scorch_segment_api/v2"
-	"github.com/golang/snappy"
 )
 
 var reflectStaticSizedocValueReader int
@@ -217,10 +216,7 @@ func (di *docValueReader) iterateAllDocValues(s *SegmentBase, visitor docNumTerm
 	}
 
 	// uncompress the already loaded data
-	uncompressed, err := snappy.Decode(di.uncompressed[:cap(di.uncompressed)], di.curChunkData)
-	if err != nil {
-		return err
-	}
+	uncompressed := di.curChunkData
 	di.uncompressed = uncompressed
 
 	start := uint64(0)
@@ -246,16 +242,12 @@ func (di *docValueReader) visitDocValues(docNum uint64,
 	}
 
 	var uncompressed []byte
-	var err error
 	// use the uncompressed copy if available
 	if len(di.uncompressed) > 0 {
 		uncompressed = di.uncompressed
 	} else {
 		// uncompress the already loaded data
-		uncompressed, err = snappy.Decode(di.uncompressed[:cap(di.uncompressed)], di.curChunkData)
-		if err != nil {
-			return err
-		}
+		uncompressed = di.curChunkData
 		di.uncompressed = uncompressed
 	}
 
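The caching structure in visitDocValues is unchanged; only the cache-miss branch differs, aliasing the loaded chunk bytes where snappy.Decode used to run. A condensed, runnable sketch of that logic (the type and method below are a simplified stand-in, not the full reader):

package main

import "fmt"

// docValueReader is a stand-in holding the two fields the diff touches.
type docValueReader struct {
	curChunkData []byte // chunk bytes as loaded from the segment
	uncompressed []byte // copy cached for reuse across visits
}

// chunkBytes mirrors the if/else in visitDocValues: reuse the cached copy
// when present; otherwise alias the loaded bytes directly, where
// snappy.Decode ran before this PR.
func (di *docValueReader) chunkBytes() []byte {
	if len(di.uncompressed) > 0 {
		return di.uncompressed // cache hit
	}
	di.uncompressed = di.curChunkData // cache miss: no decode step anymore
	return di.uncompressed
}

func main() {
	di := &docValueReader{curChunkData: []byte("term\xff")}
	fmt.Printf("%q\n", di.chunkBytes()) // first visit fills the cache
	fmt.Printf("%q\n", di.chunkBytes()) // later visits reuse it
}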
3 changes: 1 addition & 2 deletions merge.go
@@ -25,7 +25,6 @@ import (
 
 	"github.com/RoaringBitmap/roaring/v2"
 	seg "github.com/blevesearch/scorch_segment_api/v2"
-	"github.com/golang/snappy"
 )
 
 var DefaultFileMergerBufferSize = 1024 * 1024
@@ -486,7 +485,7 @@ func mergeStoredAndRemap(segments []*SegmentBase, drops []*roaring.Bitmap,
 
 			metaBytes := metaBuf.Bytes()
 
-			compressed = snappy.Encode(compressed[:cap(compressed)], data)
+			compressed = data
 
 			// record where we're about to start writing
 			docNumOffsets[newDocNum] = uint64(w.Count())
3 changes: 1 addition & 2 deletions new.go
@@ -24,7 +24,6 @@ import (
 
 	index "github.com/blevesearch/bleve_index_api"
 	segment "github.com/blevesearch/scorch_segment_api/v2"
-	"github.com/golang/snappy"
 )
 
 var NewSegmentBufferNumResultsBump int = 100
@@ -372,7 +371,7 @@ func (s *interim) writeStoredFields() (
 
 		metaBytes := s.metaBuf.Bytes()
 
-		compressed = snappy.Encode(compressed[:cap(compressed)], data)
+		compressed = data
 		s.incrementBytesWritten(uint64(len(compressed)))
 		docStoredOffsets[docNum] = uint64(s.w.Count())
 
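Both write paths above (mergeStoredAndRemap in merge.go and writeStoredFields in new.go) make the same substitution: compressed now aliases the raw document data. A sketch of the size difference this implies for repetitive stored data (toy payload; the variable name mirrors the diff):

package main

import (
	"bytes"
	"fmt"

	"github.com/golang/snappy"
)

func main() {
	// highly repetitive stored-field data compresses well under snappy
	data := bytes.Repeat([]byte("abcd"), 256)

	var compressed []byte

	// old write path: persist the snappy block
	compressed = snappy.Encode(compressed[:cap(compressed)], data)
	fmt.Printf("stored (old): %d bytes\n", len(compressed))

	// new write path: persist the raw bytes, so on-disk size and the
	// incrementBytesWritten accounting both reflect the raw length
	compressed = data
	fmt.Printf("stored (new): %d bytes\n", len(compressed))
}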
6 changes: 1 addition & 5 deletions segment.go
@@ -28,7 +28,6 @@ import (
 	mmap "github.com/blevesearch/mmap-go"
 	segment "github.com/blevesearch/scorch_segment_api/v2"
 	"github.com/blevesearch/vellum"
-	"github.com/golang/snappy"
 )
 
 var reflectStaticSizeSegmentBase int
@@ -567,10 +566,7 @@ func (s *SegmentBase) visitStoredFields(vdc *visitDocumentCtx, num uint64,
 	// handle non-"_id" fields
 	compressed = compressed[idFieldValLen:]
 
-	uncompressed, err := snappy.Decode(vdc.buf[:cap(vdc.buf)], compressed)
-	if err != nil {
-		return err
-	}
+	uncompressed := compressed
 
 	for keepGoing {
 		field, err := binary.ReadUvarint(&vdc.reader)