Skip to content

Commit

Permalink
Fix getSnippetHash not considering all files (#765)
Browse files Browse the repository at this point in the history
* Fix `getSnippetHash` not considering all files
Made a stupid mistake in the previous PR: #759

This fixes it and adds another benchmark test to ensure it doesn't happen again.
I also removed the GitHub Actions benchmark test, as it's not really useful: any time we change the tests, we'll get erroneous results, which will be annoying.
Instead, I added the benchmark tests to the Drone run, so we can compare results whenever we want.

* linting

* Add changelog, will release straight away
  • Loading branch information
julienduchesne authored Sep 28, 2022
1 parent 1bac47d commit 904a7c5
Show file tree
Hide file tree
Showing 5 changed files with 96 additions and 131 deletions.
75 changes: 0 additions & 75 deletions .github/workflows/benchstat-pr.yml

This file was deleted.

7 changes: 7 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,12 @@
# Changelog

## 0.23.1 (2022-09-28)

### Bug Fixes/Enhancements

- **export**: Fix `getSnippetHash` not considering all files
**[#765](https://github.com/grafana/tanka/pull/765)**

## 0.23.0 (2022-09-26)

### Features
Expand Down
2 changes: 1 addition & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ lint: $(GOLINTER)
$(GOLINTER) run

test:
go test ./...
go test ./... -bench=. -benchmem

# Compilation
dev:
Expand Down
2 changes: 1 addition & 1 deletion pkg/jsonnet/imports.go
Original file line number Diff line number Diff line change
Expand Up @@ -204,7 +204,7 @@ func findImportRecursiveRegexp(list map[string]bool, vm *jsonnet.VM, filename, c
}

if list[abs] {
return nil
continue
}
list[abs] = true

Expand Down
141 changes: 87 additions & 54 deletions pkg/jsonnet/imports_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -28,81 +28,114 @@ func TestTransitiveImports(t *testing.T) {
}, imports)
}

const testFile = `
local localImport = <IMPORT>;
local myFunc = function() <IMPORT>;
// BenchmarkGetSnippetHash measures getSnippetHash over a large generated
// project in two shapes: every file imported directly from main.jsonnet, and
// a deeply nested import chain (importFromMain == false). Each iteration also
// asserts the exact expected hash, so the benchmark doubles as a regression
// test ensuring all transitive imports are considered in the hash.
func BenchmarkGetSnippetHash(b *testing.B) {
	for _, tc := range []struct {
		name           string // sub-benchmark name
		importFromMain bool   // true: main.jsonnet imports every file directly; false: nested chain
		expectedHash   string // exact hash getSnippetHash must produce for this project shape
	}{
		{
			name:           "all-imported-from-main",
			importFromMain: true,
			expectedHash:   "ktY8NYZOoPacsNYrH7-DslRgLG54EMRdk3MQSM3vcUg=",
		},
		{
			name:           "deeply-nested",
			importFromMain: false,
			expectedHash:   "W1Q_uS6jTGcsd7nvJfc-i785sqjBmOzfOAzqzhXVc0A=",
		},
	} {
		b.Run(tc.name, func(b *testing.B) {
			// Create a very large and complex project
			tempDir := b.TempDir()
			generateTestProject(b, tempDir, 10000, tc.importFromMain)

			// Create a VM. It's important to reuse the same VM
			// While there is a caching mechanism that normally shouldn't be shared in a benchmark iteration,
			// it's useful to evaluate its impact here, because the caching will also improve the evaluation performance afterwards.
			vm := MakeVM(Opts{ImportPaths: []string{tempDir}})
			content, err := os.ReadFile(filepath.Join(tempDir, "main.jsonnet"))
			require.NoError(b, err)

			// Run the benchmark
			mainPath := filepath.Join(tempDir, "main.jsonnet")
			c := string(content)
			b.ResetTimer()
			for i := 0; i < b.N; i++ {
				// Reset the shared cache so every iteration starts cold,
				// then verify both success and the exact hash value.
				fileHashes = sync.Map{}
				hash, err := getSnippetHash(vm, mainPath, c)
				require.NoError(b, err)
				require.Equal(b, tc.expectedHash, hash)
			}
		})
	}
}

{
local this = self,
func generateTestProject(t testing.TB, dir string, depth int, importAllFromMain bool) []string {
t.Helper()

attribute: {
name: 'test',
value: self.name,
otherValue: 'other ' + self.value,
},
nested: {
const testFile = `
local localImport = <IMPORT>;
local myFunc = function() <IMPORT>;
{
local this = self,
attribute: {
name: 'test',
value: self.name,
otherValue: 'other ' + self.value,
},
nested: {
nested: {
nested: {
nested1: {
nested: {
nested1: {
nested: {
attribute: <IMPORT>,
nested: {
nested1: {
nested: {
nested1: {
nested: {
attribute: <IMPORT>,
},
},
nested2: {
strValue: this.nested.nested.nested,
},
},
nested2: {
strValue: this.nested.nested.nested,
},
},
},
nested2: {
intValue: 1,
importValue: <IMPORT>,
nested2: {
intValue: 1,
importValue: <IMPORT>,
},
},
},
},
},
},
other: myFunc(),
useLocal: localImport,
}`

other: myFunc(),
useLocal: localImport,
}`

func BenchmarkGetSnippetHash(b *testing.B) {
// Create a very large and complex project
tempDir := b.TempDir()
var allFiles []string

var mainContentSplit []string
for i := 0; i < 1000; i++ {
mainContentSplit = append(mainContentSplit, fmt.Sprintf("(import 'file%d.libsonnet')", i))
}
require.NoError(b, os.WriteFile(filepath.Join(tempDir, "main.jsonnet"), []byte(strings.Join(mainContentSplit, " + ")), 0644))
for i := 0; i < 1000; i++ {
filePath := filepath.Join(dir, fmt.Sprintf("file%d.libsonnet", i))
err := os.WriteFile(
filepath.Join(tempDir, fmt.Sprintf("file%d.libsonnet", i)),
filePath,
[]byte(strings.ReplaceAll(testFile, "<IMPORT>", fmt.Sprintf("import 'file%d.libsonnet'", i+1))),
0644,
)
require.NoError(b, err)
require.NoError(t, err)
allFiles = append(allFiles, filePath)
}
require.NoError(b, os.WriteFile(filepath.Join(tempDir, "file1000.libsonnet"), []byte(`"a string"`), 0644))

// Create a VM. It's important to reuse the same VM
// While there is a caching mechanism that normally shouldn't be shared in a benchmark iteration,
// it's useful to evaluate its impact here, because the caching will also improve the evaluation performance afterwards.
vm := MakeVM(Opts{ImportPaths: []string{tempDir}})
content, err := os.ReadFile(filepath.Join(tempDir, "main.jsonnet"))
require.NoError(b, err)

// Run the benchmark
mainPath := filepath.Join(tempDir, "main.jsonnet")
c := string(content)
b.ResetTimer()
for i := 0; i < b.N; i++ {
fileHashes = sync.Map{}
hash, err := getSnippetHash(vm, mainPath, c)
require.NoError(b, err)
require.Equal(b, "XrkW8N2EvkFMvdIuHTsGsQespVUl9_xiFmM7v1mqX5s=", hash)
if !importAllFromMain {
mainContentSplit = append(mainContentSplit, "import 'file0.libsonnet'")
}
require.NoError(t, os.WriteFile(filepath.Join(dir, "main.jsonnet"), []byte(strings.Join(mainContentSplit, " + ")), 0644))
allFiles = append(allFiles, filepath.Join(dir, "main.jsonnet"))
require.NoError(t, os.WriteFile(filepath.Join(dir, "file1000.libsonnet"), []byte(`"a string"`), 0644))
allFiles = append(allFiles, filepath.Join(dir, "file1000.libsonnet"))

return allFiles
}

0 comments on commit 904a7c5

Please sign in to comment.