diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 32a02413..557be441 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -18,12 +18,13 @@ But perhaps the most useful thing you can do is **use the tool**. Join the [Disc ## Project structure -There are two main parts to the codebase: +There are three main parts to the codebase: -- `go/`: This contains the `replicate` command-line interface. It also provides a shared library that the Python library uses in `go/pkg/shared/`. This is called with subprocess and jsonrpc via stdout/in (it's like CGI RPC!). +- `go/`: This contains the `replicate` command-line interface. It also provides a shared library that the Python library uses in `go/pkg/shared/`. The shared library runs in a standalone GRPC server in a Python subprocess. - `python/`: This is the `replicate` Python library. The Python package also includes the `replicate` Go command-line interface and a Go shared library. +- `proto/`: This defines the interface between the Go server and the Python client. -The main mechanism that is shared between these two parts is the storage mechanism – reading/saving files on Amazon S3 or Google Cloud Storage. By implementing this in Go, we don't have to add a bazillion dependencies to the Python project. All other abstractions are mostly duplicated across the two languages (repositories, experiments, checkpoints, etc), but this line might move over time. +The Python library acts as a thin client on top of the Go GRPC server, and all the heavy lifting is done in Go. The other parts are: @@ -100,6 +101,8 @@ This will build the CLI and the Python package: The built Python packages are in `python/dist/`. These contain both the CLI and the Python library. +To generate the Protobuf implementations you need to install the required Protobuf tools. This is documented in `proto/Makefile`. Once they're installed, simply run `make build` from the `proto` folder. 
+ ## Release This will release both the CLI and Python package: diff --git a/Makefile b/Makefile index 8f883e45..49b4ced1 100644 --- a/Makefile +++ b/Makefile @@ -81,3 +81,8 @@ verify-go-version: .PHONY: verify-python-version verify-python-version: @./makefile-scripts/verify-python-version.sh + +.PHONY: fmt +fmt: + cd go && $(MAKE) fmt + cd python && $(MAKE) fmt diff --git a/go/Makefile b/go/Makefile index 9789477b..fdc907f2 100644 --- a/go/Makefile +++ b/go/Makefile @@ -95,8 +95,8 @@ lint: go run github.com/golangci/golangci-lint/cmd/golangci-lint run ./... .PHONY: fmt -fmt: install-goimports - goimports --local replicate.ai -w -d . +fmt: + go run golang.org/x/tools/cmd/goimports --local replicate.ai -w -d . .PHONY: mod-tidy mod-tidy: diff --git a/go/cmd/replicate-shared/main.go b/go/cmd/replicate-shared/main.go index 56ae5959..fed1018f 100644 --- a/go/cmd/replicate-shared/main.go +++ b/go/cmd/replicate-shared/main.go @@ -1,9 +1,13 @@ package main import ( - "github.com/replicate/replicate/go/pkg/shared" + "github.com/replicate/replicate/go/pkg/cli" + "github.com/replicate/replicate/go/pkg/console" ) func main() { - shared.Serve() + cmd := cli.NewDaemonCommand() + if err := cmd.Execute(); err != nil { + console.Fatal("%s", err) + } } diff --git a/go/go.mod b/go/go.mod index 148b7b15..9fbc7907 100644 --- a/go/go.mod +++ b/go/go.mod @@ -9,7 +9,8 @@ require ( github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869 // indirect github.com/ghodss/yaml v1.0.0 github.com/go-bindata/go-bindata v3.1.2+incompatible - github.com/golangci/golangci-lint v1.34.1 + github.com/golang/protobuf v1.4.3 + github.com/golangci/golangci-lint v1.32.2 github.com/hashicorp/go-uuid v1.0.2 github.com/kami-zh/go-capturer v0.0.0-20171211120116-e492ea43421d github.com/logrusorgru/aurora v2.0.3+incompatible @@ -29,7 +30,11 @@ require ( github.com/xtgo/uuid v0.0.0-20140804021211-a0b114877d4c // indirect golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58 golang.org/x/sync 
v0.0.0-20201020160332-67f06af15bc9 - golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a - google.golang.org/api v0.36.0 - gotest.tools/gotestsum v0.6.0 + golang.org/x/sys v0.0.0-20201119102817-f84b799fce68 // indirect + golang.org/x/tools v0.0.0-20201121010211-780cb80bd7fb + google.golang.org/api v0.32.0 + google.golang.org/genproto v0.0.0-20200921151605-7abf4a1a14d5 + google.golang.org/grpc v1.33.2 + google.golang.org/protobuf v1.25.0 + gotest.tools/gotestsum v0.5.2 ) diff --git a/go/go.sum b/go/go.sum index baaa6547..1f9e25ad 100644 --- a/go/go.sum +++ b/go/go.sum @@ -19,9 +19,8 @@ cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZ cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= cloud.google.com/go v0.65.0 h1:Dg9iHVQfrhq82rUNu9ZxUDrJLaxFUe/HlCVaLyRruq8= cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= +cloud.google.com/go v0.66.0 h1:DZeAkuQGQqnm9Xv36SbMJEU8aFBz4wL04UpMWPWwjzg= cloud.google.com/go v0.66.0/go.mod h1:dgqGAjKCDxyhGTtC9dAREQGUJpkceNm1yt590Qno0Ko= -cloud.google.com/go v0.72.0 h1:eWRCuwubtDrCJG0oSUMgnsbD4CmPFQF2ei4OFbXvwww= -cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= cloud.google.com/go/bigquery v1.4.0 h1:xE3CPsOgttP4ACBePh79zTKALtXwn/Edhcr16R5hMWU= @@ -74,10 +73,6 @@ github.com/araddon/dateparse v0.0.0-20200409225146-d820a6159ab1/go.mod h1:SLqhdZ github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= -github.com/ashanbrown/forbidigo v1.0.0 
h1:QdNXBduDUopc3GW+YVYZn8jzmIMklQiCfdN2N5+dQeE= -github.com/ashanbrown/forbidigo v1.0.0/go.mod h1:PH+zMRWE15yW69fYfe7Kn8nYR6yYyafc3ntEGh2BBAg= -github.com/ashanbrown/makezero v0.0.0-20201205152432-7b7cdbb3025a h1:/U9tbJzDRof4fOR51vwzWdIBsIH6R2yU0KG1MBRM2Js= -github.com/ashanbrown/makezero v0.0.0-20201205152432-7b7cdbb3025a/go.mod h1:oG9Dnez7/ESBqc4EdrdNlryeo7d0KcW1ftXHm7nU/UU= github.com/aws/aws-sdk-go v1.36.20 h1:IQr81xegCd40Xq21ZjFToKw9llaCzO1LRE75CgnvJ1Q= github.com/aws/aws-sdk-go v1.36.20/go.mod h1:hcU610XS61/+aQV88ixoOzUoG7v3b31pl2zKMmprdro= github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= @@ -105,13 +100,13 @@ github.com/creack/pty v1.1.9 h1:uDmaGzcdjhF4i/plgjmEsriH11Y0o7RKapEf/LDaM3w= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/creack/pty v1.1.11 h1:07n33Z8lZxZ2qwegKbObQohDhXDQxiMMz1NOUGYlesw= github.com/creack/pty v1.1.11/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= -github.com/daixiang0/gci v0.2.7 h1:bosLNficubzJZICsVzxuyNc6oAbdz0zcqLG2G/RxtY4= -github.com/daixiang0/gci v0.2.7/go.mod h1:+4dZ7TISfSmqfAGv59ePaHfNzgGtIkHAhhdKggP1JAc= +github.com/daixiang0/gci v0.2.4 h1:BUCKk5nlK2m+kRIsoj+wb/5hazHvHeZieBKWd9Afa8Q= +github.com/daixiang0/gci v0.2.4/go.mod h1:+AV8KmHTGxxwp/pY84TLQfFKp2vuKXXJVzF3kD/hfR4= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/denis-tingajkin/go-header v0.4.2 h1:jEeSF4sdv8/3cT/WY8AgDHUoItNSoEZ7qg9dX7pc218= -github.com/denis-tingajkin/go-header v0.4.2/go.mod h1:eLRHAVXzE5atsKAnNRDB90WHCFFnBUn4RN0nRcs1LJA= +github.com/denis-tingajkin/go-header v0.3.1 h1:ymEpSiFjeItCy1FOP+x0M2KdCELdEAHUsNa8F+hHc6w= +github.com/denis-tingajkin/go-header v0.3.1/go.mod h1:sq/2IxMhaZX+RRcgHfCRx/m0M5na0fBt4/CRe7Lrji0= 
github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no= github.com/dsnet/compress v0.0.1 h1:PlZu0n3Tuv04TzpfPbrnI0HW/YwodEXDS+oPKahKF0Q= @@ -218,12 +213,14 @@ github.com/golangci/errcheck v0.0.0-20181223084120-ef45e06d44b6 h1:YYWNAGTKWhKpc github.com/golangci/errcheck v0.0.0-20181223084120-ef45e06d44b6/go.mod h1:DbHgvLiFKX1Sh2T1w8Q/h4NAI8MHIpzCdnBUDTXU3I0= github.com/golangci/go-misc v0.0.0-20180628070357-927a3d87b613 h1:9kfjN3AdxcbsZBf8NjltjWihK2QfBBBZuv91cMFfDHw= github.com/golangci/go-misc v0.0.0-20180628070357-927a3d87b613/go.mod h1:SyvUF2NxV+sN8upjjeVYr5W7tyxaT1JVtvhKhOn2ii8= +github.com/golangci/goconst v0.0.0-20180610141641-041c5f2b40f3 h1:pe9JHs3cHHDQgOFXJJdYkK6fLz2PWyYtP4hthoCMvs8= +github.com/golangci/goconst v0.0.0-20180610141641-041c5f2b40f3/go.mod h1:JXrF4TWy4tXYn62/9x8Wm/K/dm06p8tCKwFRDPZG/1o= github.com/golangci/gocyclo v0.0.0-20180528144436-0a533e8fa43d h1:pXTK/gkVNs7Zyy7WKgLXmpQ5bHTrq5GDsp8R9Qs67g0= github.com/golangci/gocyclo v0.0.0-20180528144436-0a533e8fa43d/go.mod h1:ozx7R9SIwqmqf5pRP90DhR2Oay2UIjGuKheCBCNwAYU= github.com/golangci/gofmt v0.0.0-20190930125516-244bba706f1a h1:iR3fYXUjHCR97qWS8ch1y9zPNsgXThGwjKPrYfqMPks= github.com/golangci/gofmt v0.0.0-20190930125516-244bba706f1a/go.mod h1:9qCChq59u/eW8im404Q2WWTrnBUQKjpNYKMbU4M7EFU= -github.com/golangci/golangci-lint v1.34.1 h1:xf1yVlLBNeCIoOHWXhwqnUeaqzONllRSgiLSahNt0Mw= -github.com/golangci/golangci-lint v1.34.1/go.mod h1:6Bnn7T0JYin7uukitgL6f9E9auQatlT0RMNOKG9lSHU= +github.com/golangci/golangci-lint v1.32.2 h1:CgIeFWTLJ3Nt1w/WU1RO351j/CjN6LIVjppbJfI9nMk= +github.com/golangci/golangci-lint v1.32.2/go.mod h1:ydr+IqtIVyAh72L16aK0bNdNg/YGa+AEgdbKj9MluzI= github.com/golangci/ineffassign v0.0.0-20190609212857-42439a7714cc
h1:gLLhTLMk2/SutryVJ6D4VZCU3CUqr8YloG7FPIBWFpI= github.com/golangci/ineffassign v0.0.0-20190609212857-42439a7714cc/go.mod h1:e5tpTHCfVze+7EpLEozzMB3eafxo2KT5veNg1k6byQU= github.com/golangci/lint-1 v0.0.0-20191013205115-297bf364a8e0 h1:MfyDlzVjl1hoaPzPD4Gpb/QgoRfSBR0jdhwGyAWwMSA= @@ -251,15 +249,10 @@ github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.2 h1:X2ev0eStA3AbceY54o37/0PQ/UWqKEiiO2dKL5OPaFM= github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.4 h1:L8R9j+yAqZuZjsqh/z+F1NCffTKKLShY6zXTItVIZ8M= -github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/martian/v3 v3.0.0 h1:pMen7vLs8nvgEYhywH3KDWJIJTeEr2ULsVWHWYHQyBs= github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= -github.com/google/martian/v3 v3.1.0 h1:wCKgOCHuUEVfsaQLpPSJb7VdYCdTVZQAuOdYm1yc/60= -github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= @@ -268,7 +261,6 @@ github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hf github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= 
github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/pprof v0.0.0-20200905233945-acf8798be1f7/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4= github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510/go.mod h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ= @@ -285,12 +277,8 @@ github.com/gostaticanalysis/analysisutil v0.0.0-20190318220348-4088753ea4d3/go.m github.com/gostaticanalysis/analysisutil v0.0.3/go.mod h1:eEOZF4jCKGi+aprrirO9e7WKB3beBRtWgqGunKl6pKE= github.com/gostaticanalysis/analysisutil v0.1.0 h1:E4c8Y1EQURbBEAHoXc/jBTK7Np14ArT8NPUiSFOl9yc= github.com/gostaticanalysis/analysisutil v0.1.0/go.mod h1:dMhHRU9KTiDcuLGdy87/2gTR8WruwYZrKdRq9m1O6uw= -github.com/gostaticanalysis/analysisutil v0.4.1 h1:/7clKqrVfiVwiBQLM0Uke4KvXnO6JcCTS7HwF2D6wG8= -github.com/gostaticanalysis/analysisutil v0.4.1/go.mod h1:18U/DLpRgIUd459wGxVHE0fRgmo1UgHDcbw7F5idXu0= github.com/gostaticanalysis/comment v1.3.0 h1:wTVgynbFu8/nz6SGgywA0TcyIoAVsYc7ai/Zp5xNGlw= github.com/gostaticanalysis/comment v1.3.0/go.mod h1:xMicKDx7XRXYdVwY9f9wQpDJVnqWxw9wCauCMKp+IBI= -github.com/gostaticanalysis/comment v1.4.1 h1:xHopR5L2lRz6OsjH4R2HG5wRhW9ySl3FsHIvi5pcXwc= -github.com/gostaticanalysis/comment v1.4.1/go.mod h1:ih6ZxzTHLdadaiSnF5WY3dxUoXfXAlTaRzuaNDlSado= github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= @@ -319,11 +307,8 @@ 
github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2p github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= -github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM= github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= -github.com/jgautheron/goconst v0.0.0-20201117150253-ccae5bf973f3 h1:7nkB9fLPMwtn/R6qfPcHileL/x9ydlhw8XyDrLI1ZXg= -github.com/jgautheron/goconst v0.0.0-20201117150253-ccae5bf973f3/go.mod h1:aAosetZ5zaeC/2EfMeRswtxUFBpe2Hr7HzkgX4fanO4= github.com/jingyugao/rowserrcheck v0.0.0-20191204022205-72ab7603b68a h1:GmsqmapfzSJkm28dhRoHz2tLRbJmqhU86IPgBtN3mmk= github.com/jingyugao/rowserrcheck v0.0.0-20191204022205-72ab7603b68a/go.mod h1:xRskid8CManxVta/ALEhJha/pweKBaVG6fWgc0yH25s= github.com/jirfag/go-printf-func-name v0.0.0-20191110105641-45db9963cdd3 h1:jNYPNLe3d8smommaoQlK7LOA5ESyUJJ+Wf79ZtA7Vp4= @@ -371,12 +356,8 @@ github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/kulti/thelper v0.1.0 h1:ig1EW6yhDiRNN3dplbhdsW2gTvbxTz9i4q5Rr/tRfpk= -github.com/kulti/thelper v0.1.0/go.mod h1:vMu2Cizjy/grP+jmsvOFDx1kYP6+PD1lqg4Yu5exl2U= -github.com/kunwardeep/paralleltest v1.0.2 h1:/jJRv0TiqPoEy/Y8dQxCFJhD56uS/pnvtatgTZBHokU= -github.com/kunwardeep/paralleltest v1.0.2/go.mod h1:ZPqNm1fVHPllh5LPVujzbVz1JN2GhLxSfY+oqUsvG30= 
-github.com/kyoh86/exportloopref v0.1.8 h1:5Ry/at+eFdkX9Vsdw3qU4YkvGtzuVfzT4X7S77LoN/M= -github.com/kyoh86/exportloopref v0.1.8/go.mod h1:1tUcJeiioIs7VWe5gcOObrux3lb66+sBqGZrRkMwPgg= +github.com/kyoh86/exportloopref v0.1.7 h1:u+iHuTbkbTS2D/JP7fCuZDo/t3rBVGo3Hf58Rc+lQVY= +github.com/kyoh86/exportloopref v0.1.7/go.mod h1:h1rDl2Kdj97+Kwh4gdz3ujE7XHmH51Q0lUiZ1z4NLj8= github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= github.com/logrusorgru/aurora v0.0.0-20181002194514-a7b3b318ed4e/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4= github.com/logrusorgru/aurora v2.0.3+incompatible h1:tOpm7WcpBTn4fjmVfgpQq0EfczGlG91VSDkswnjF5A8= @@ -474,8 +455,8 @@ github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/polyfloyd/go-errorlint v0.0.0-20201127212506-19bd8db6546f h1:xAw10KgJqG5NJDfmRqJ05Z0IFblKumjtMeyiOLxj3+4= -github.com/polyfloyd/go-errorlint v0.0.0-20201127212506-19bd8db6546f/go.mod h1:wi9BfjxjF/bwiZ701TzmfKu6UKC357IOAtNr0Td0Lvw= +github.com/polyfloyd/go-errorlint v0.0.0-20201006195004-351e25ade6e3 h1:Amgs0nbayPhBNGh1qPqqr2e7B2qNAcBgRjnBH/lmn8k= +github.com/polyfloyd/go-errorlint v0.0.0-20201006195004-351e25ade6e3/go.mod h1:wi9BfjxjF/bwiZ701TzmfKu6UKC357IOAtNr0Td0Lvw= github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= github.com/prometheus/client_golang v0.9.3/go.mod h1:/TN21ttK/J9q6uSwhBd54HahCDft0ttaMvbicHlPoso= @@ -495,10 +476,10 @@ github.com/quasilyte/regex/syntax v0.0.0-20200407221936-30656e2c4a95/go.mod h1:r github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod 
h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.5.2/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= -github.com/rogpeppe/go-internal v1.6.2/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= +github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= -github.com/ryancurrah/gomodguard v1.2.0 h1:YWfhGOrXwLGiqcC/u5EqG6YeS8nh+1fw0HEc85CVZro= -github.com/ryancurrah/gomodguard v1.2.0/go.mod h1:rNqbC4TOIdUDcVMSIpNNAzTbzXAZa6W5lnUepvuMMgQ= +github.com/ryancurrah/gomodguard v1.1.0 h1:DWbye9KyMgytn8uYpuHkwf0RHqAYO6Ay/D0TbCpPtVU= +github.com/ryancurrah/gomodguard v1.1.0/go.mod h1:4O8tr7hBODaGE6VIhfJDHcwzh5GUccKSJBU0UMXJFVM= github.com/ryanrolds/sqlclosecheck v0.3.0 h1:AZx+Bixh8zdUBxUA1NxbxVAS78vTPq4rCb8OUZI9xFw= github.com/ryanrolds/sqlclosecheck v0.3.0/go.mod h1:1gREqxyTGR3lVtpngyFo3hZAgk0KCtEdgEkHwDbigdA= github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= @@ -565,15 +546,15 @@ github.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= github.com/tdakkota/asciicheck v0.0.0-20200416190851-d7f85be797a2 h1:Xr9gkxfOP0KQWXKNqmwe8vEeSUiUj4Rlee9CMVX2ZUQ= github.com/tdakkota/asciicheck v0.0.0-20200416190851-d7f85be797a2/go.mod h1:yHp0ai0Z9gUljN3o0xMhYJnH/IcvkdTBOX2fmJ93JEM= -github.com/tetafro/godot v1.3.2 h1:HzWC3XjadkyeuBZxkfAFNY20UVvle0YD51I6zf6RKlU= -github.com/tetafro/godot v1.3.2/go.mod h1:ah7jjYmOMnIjS9ku2krapvGQrFNtTLo9Z/qB3dGU1eU= +github.com/tetafro/godot v0.4.9 h1:dSOiuasshpevY73eeI3+zaqFnXSBKJ3mvxbyhh54VRo= +github.com/tetafro/godot v0.4.9/go.mod h1:/7NLHhv08H1+8DNj0MElpAACw1ajsCuf3TKNQxA5S+0= github.com/timakin/bodyclose 
v0.0.0-20190930140734-f7f2e9bca95e h1:RumXZ56IrCj4CL+g1b9OL/oH0QnsF976bC8xQFYUD5Q= github.com/timakin/bodyclose v0.0.0-20190930140734-f7f2e9bca95e/go.mod h1:Qimiffbc6q9tBWlVV6x0P9sat/ao1xEkREYPPj9hphk= github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= github.com/tomarrell/wrapcheck v0.0.0-20200807122107-df9e8bcb914d h1:3EZyvNUMsGD1QA8cu0STNn1L7I77rvhf2IhOcHYQhSw= github.com/tomarrell/wrapcheck v0.0.0-20200807122107-df9e8bcb914d/go.mod h1:yiFB6fFoV7saXirUGfuK+cPtUh4NX/Hf5y2WC2lehu0= -github.com/tommy-muehle/go-mnd v1.3.1-0.20201008215730-16041ac3fe65 h1:Y0bLA422kvb32uZI4fy/Plop/Tbld0l9pSzl+j1FWok= -github.com/tommy-muehle/go-mnd v1.3.1-0.20201008215730-16041ac3fe65/go.mod h1:T22e7iRN4LsFPZGyRLRXeF+DWVXFuV9thsyO7NjbbTI= +github.com/tommy-muehle/go-mnd v1.3.1-0.20200224220436-e6f9a994e8fa h1:RC4maTWLKKwb7p1cnoygsbKIgNlJqSYBeAFON3Ar8As= +github.com/tommy-muehle/go-mnd v1.3.1-0.20200224220436-e6f9a994e8fa/go.mod h1:dSUh0FtTP8VhvkL1S+gUR1OKd9ZnSaozuI6r3m6wOig= github.com/ulikunitz/xz v0.5.6/go.mod h1:2bypXElzHzzJZwzH67Y6wb67pO62Rzfn7BSiF4ABRW8= github.com/ulikunitz/xz v0.5.7 h1:YvTNdFzX6+W5m9msiYg/zpkSURPPtOlzbqYjrFn7Yt4= github.com/ulikunitz/xz v0.5.7/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= @@ -605,8 +586,6 @@ go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.4 h1:LYy1Hy3MJdrCdMwwzxA/dRok4ejH+RwNGbuoD9fCjto= go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.5 h1:dntmOdLpSpHlVqbW5Eay97DelsZHe+55D+xC6i0dDS0= -go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= go.uber.org/zap v1.10.0/go.mod 
h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= @@ -653,8 +632,6 @@ golang.org/x/mod v0.2.0 h1:KU7oHjnv3XNWfa5COkzUifxZmxp1TyI7ImMXqFxLwvQ= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0 h1:RM4zey1++hCTbCVQfnWeKs9/IEsaBLA8vTkd0WVtmH4= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.4.0 h1:8pl+sMODzuvGJkmj2W4kZihvVb5mKm8pB/X44PIQHv8= -golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -694,7 +671,6 @@ golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81R golang.org/x/net v0.0.0-20200904194848-62affa334b73/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20201021035429-f5854403a974 h1:IX6qOQeG5uLjB/hjjwjedwfjND0hgjPMMyO1RoIXQNI= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201110031124-69a78807bb2b h1:uwuIcX0g4Yl1NC5XAz37xsr2lTtcqevgzYNVt49waME= golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= @@ -767,8 +743,8 @@ golang.org/x/sys v0.0.0-20200831180312-196b9ba8737a/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 
golang.org/x/sys v0.0.0-20201009025420-dfb3f7c4e634/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3 h1:kzM6+9dur93BcC2kVlYl34cHU+TYZLanmpSJHVMmL64= -golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68 h1:nxC68pudNYkKU6jWhgrqdreuFiOQWj1Fs7T3VrH4Pjw= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -777,8 +753,6 @@ golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3 h1:cokOdA+Jmi5PJGXLlLllQSgYigAEfHXJAERHVMaCc2k= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.4 h1:0YWbFKbhXG/wIiuHDSKpS0Iy7FSA+u45VtBMfQcFTTc= -golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4 h1:SvFZT6jyqRaOeXpc5h/JSfZenJ2O330aBsf7JfSUXmQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= @@ -789,6 +763,7 @@ golang.org/x/tools v0.0.0-20180525024113-a5b4c53f6e8b/go.mod h1:n7NCudcB/nEzxVGm golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190110163146-51295c7ec13a/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod 
h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190221204921-83362c3779f5/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= golang.org/x/tools v0.0.0-20190307163923-6a08e3108db3/go.mod h1:25r3+/G6/xytQM8iWZKq3Hn0kr0rgFKPUNVEL/dr3z4= golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= @@ -808,7 +783,6 @@ golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgw golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20190910044552-dd2b5c81c578/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20190916130336-e45ffcd953cc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191108193012-7d206e10da11/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= @@ -831,8 +805,8 @@ golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapK golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200321224714-0d839f3cf2ed/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= golang.org/x/tools v0.0.0-20200324003944-a576cf524670/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= 
-golang.org/x/tools v0.0.0-20200329025819-fd4102a86c65/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4 h1:kDtqNkeBrZb8B+atrj50B5XLHpzXXqcCdZPP/ApQ5NY= golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= golang.org/x/tools v0.0.0-20200410194907-79a7a3126eef/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= @@ -843,14 +817,13 @@ golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roY golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200622203043-20e05c1c8ffa/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200624225443-88f3c62a19ff/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200625211823-6506e20df31f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200724022722-7017fd6b1305/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200731060945-b5fad4ed8dd6/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200812195022-5ae4c3c160a0/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200820010801-b793a1359eac/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools 
v0.0.0-20200828161849-5deb26317202/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200831203904-5a2aa26beb65/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= @@ -861,12 +834,9 @@ golang.org/x/tools v0.0.0-20201001104356-43ebab892c4c/go.mod h1:z6u4i615ZeAfBE4X golang.org/x/tools v0.0.0-20201002184944-ecd9fd270d5d/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU= golang.org/x/tools v0.0.0-20201007032633-0806396f153e/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU= golang.org/x/tools v0.0.0-20201011145850-ed2f50202694/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU= -golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201114224030-61ea331ec02b/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201118003311-bd56c0adb394/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201013201025-64a9e34f3752/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU= +golang.org/x/tools v0.0.0-20201121010211-780cb80bd7fb h1:z5+u0pkAUPUWd3taoTialQ2JAMo4Wo1Z3L25U4ZV9r0= golang.org/x/tools v0.0.0-20201121010211-780cb80bd7fb/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a h1:+77BOOi9CMFjpy3D2P/OnfSSmC/Hx/fGAQJUAQaM2gc= -golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= @@ -891,11 +861,8 @@ google.golang.org/api v0.29.0 h1:BaiDisFir8O4IJxvAabCGGkQ6yCJegNQqSVoYUNAnbk= google.golang.org/api v0.29.0/go.mod 
h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= google.golang.org/api v0.31.0/go.mod h1:CL+9IBCa2WWU6gRuBWaKqGWLFFwbEUXkfeMkHLQWYWo= +google.golang.org/api v0.32.0 h1:Le77IccnTqEa8ryp9wIpX5W3zYm7Gf9LhOp9PHcwFts= google.golang.org/api v0.32.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= -google.golang.org/api v0.35.0 h1:TBCmTTxUrRDA1iTctnK/fIeitxIZ+TQuaf0j29fmCGo= -google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= -google.golang.org/api v0.36.0 h1:l2Nfbl2GPXdWorv+dT2XfinX2jOOw4zv1VhLstx+6rE= -google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -904,8 +871,6 @@ google.golang.org/appengine v1.6.5 h1:tycE03LOZYQNhDpS27tcQdAzLCVMaj7QT2SXxebnpC google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/appengine v1.6.6 h1:lMO5rYAqUxkmaj76jAkRUvt5JZgFymx/+Q5Mzfivuhc= google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c= -google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8 h1:Nw54tB0rB7hY/N0NQvRW8DG4Yk3Q6T9cu9RcFQDu1tc= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= @@ -941,10 +906,8 @@ google.golang.org/genproto v0.0.0-20200831141814-d751682dd103/go.mod h1:FWY/as6D google.golang.org/genproto 
v0.0.0-20200904004341-0bd0a958aa1d h1:92D1fum1bJLKSdr11OJ+54YeCMCGYIygTA7R/YZxH5M= google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20200914193844-75d14daec038/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200921151605-7abf4a1a14d5 h1:B9nroC8SSX5GtbVvxPF9tYIVkaCpjhVLOrlAY8ONzm8= google.golang.org/genproto v0.0.0-20200921151605-7abf4a1a14d5/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e h1:wYR00/Ht+i/79g/gzhdehBgLIJCklKoc8Q/NebdzzpY= -google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= @@ -959,7 +922,6 @@ google.golang.org/grpc v1.29.1 h1:EC2SB8S04d2r73uptxphDSUG+kTKVgjRPF+N3xpxRB4= google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc v1.31.1 h1:SfXqXS5hkufcdZ/mHtYCh53P2b+92WQq/DZcKLgsFRs= google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= google.golang.org/grpc v1.32.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= google.golang.org/grpc v1.33.2 h1:EQyQC3sa8M+p6Ulc8yy9SWSS2GVwyRc83gAbG8lrl4o= @@ -998,16 +960,12 @@ gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.8/go.mod 
h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU= gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= -gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gotest.tools/gotestsum v0.6.0 h1:0zIxynXq9gkAcRpboAi3qOQIkZkCt/stfQzd7ab7Czs= -gotest.tools/gotestsum v0.6.0/go.mod h1:LEX+ioCVdeWhZc8GYfiBRag360eBhwixWJ62R9eDQtI= +gotest.tools/gotestsum v0.5.2 h1:sSKWtEFqorHhuBCHU6MeUl50cq9U2J3d1m5NlQTVrbY= +gotest.tools/gotestsum v0.5.2/go.mod h1:hC9TQserDVTWcJuARh76Ydp3ZwuE+pIIWpt2BzDLD6M= gotest.tools/v3 v3.0.2 h1:kG1BFyqVHuQoVQiR1bWGnfz/fmHvvuiSPIV7rvl360E= gotest.tools/v3 v3.0.2/go.mod h1:3SzNCllyD9/Y+b5r9JIKQ474KzkZyqLqEfYqMsX94Bk= -gotest.tools/v3 v3.0.3 h1:4AuOwCGf4lLR9u3YOe2awrHygurzhO/HeQ6laiA6Sx0= -gotest.tools/v3 v3.0.3/go.mod h1:Z7Lb0S5l+klDB31fvDQX8ss/FlKDxtlFlw3Oa8Ymbl8= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= @@ -1018,8 +976,8 @@ honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9 honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= honnef.co/go/tools v0.0.1-2020.1.6 h1:W18jzjh8mfPez+AwGLxmOImucz/IFjpNlrKVnaj2YVc= honnef.co/go/tools v0.0.1-2020.1.6/go.mod h1:pyyisuGw24ruLjrr1ddx39WE0y9OooInRzEYLhQB2YY= -mvdan.cc/gofumpt v0.0.0-20201129102820-5c11c50e9475 h1:5ZmJGYyuTlhdlIpRxSFhdJqkXQweXETFCEaLhRAX3e8= -mvdan.cc/gofumpt 
v0.0.0-20201129102820-5c11c50e9475/go.mod h1:E4LOcu9JQEtnYXtB1Y51drqh2Qr2Ngk9J3YrRCwcbd0= +mvdan.cc/gofumpt v0.0.0-20200802201014-ab5a8192947d h1:t8TAw9WgTLghti7RYkpPmqk4JtQ3+wcP5GgZqgWeWLQ= +mvdan.cc/gofumpt v0.0.0-20200802201014-ab5a8192947d/go.mod h1:bzrjFmaD6+xqohD3KYP0H2FEuxknnBmyyOxdhLdaIws= mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed h1:WX1yoOaKQfddO/mLzdV4wptyWgoH/6hwLs7QHTixo0I= mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed/go.mod h1:Xkxe497xwlCKkIaQYRfC7CSLworTXY9RMqwhhCm+8Nc= mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b h1:DxJ5nJdkhDlLok9K6qO+5290kphDJbHOQO1DFFFTeBo= diff --git a/go/pkg/cli/checkout.go b/go/pkg/cli/checkout.go index b23b6e2a..80fc457c 100644 --- a/go/pkg/cli/checkout.go +++ b/go/pkg/cli/checkout.go @@ -3,7 +3,6 @@ package cli import ( "fmt" "os" - "path" "path/filepath" "github.com/logrusorgru/aurora" @@ -12,7 +11,6 @@ import ( "github.com/replicate/replicate/go/pkg/console" "github.com/replicate/replicate/go/pkg/files" "github.com/replicate/replicate/go/pkg/project" - "github.com/replicate/replicate/go/pkg/repository" ) type checkoutOpts struct { @@ -42,24 +40,8 @@ func newCheckoutCommand() *cobra.Command { return cmd } -// Returns the repository requested by opts.repositoryURL -func getRepositoryFromOpts(opts checkoutOpts) (repository.Repository, error) { - repositoryURL, projectDir, err := getRepositoryURLFromStringOrConfig(opts.repositoryURL) - if err != nil { - return nil, err - } - - repo, err := getRepository(repositoryURL, projectDir) - if err != nil { - return nil, err - } - - return repo, nil -} - // Returns the experiment and the most appropriate checkpoint for that experiment. 
-func getExperimentAndCheckpoint(prefix string, repo repository.Repository) (*project.Experiment, *project.Checkpoint, error) { - proj := project.NewProject(repo) +func getExperimentAndCheckpoint(prefix string, proj *project.Project, projectDir string) (*project.Experiment, *project.Checkpoint, error) { result, err := proj.CheckpointOrExperimentFromPrefix(prefix) if err != nil { return nil, nil, err @@ -151,12 +133,17 @@ func overwriteDisplayPathPrompt(displayPath string, force bool) error { func checkoutCheckpoint(opts checkoutOpts, args []string) error { prefix := args[0] - repo, err := getRepositoryFromOpts(opts) + repositoryURL, projectDir, err := getRepositoryURLFromStringOrConfig(opts.repositoryURL) + if err != nil { + return err + } + repo, err := getRepository(repositoryURL, projectDir) if err != nil { return err } - experiment, checkpoint, err := getExperimentAndCheckpoint(prefix, repo) + proj := project.NewProject(repo, projectDir) + experiment, checkpoint, err := getExperimentAndCheckpoint(prefix, proj, projectDir) if err != nil { return err } @@ -191,112 +178,8 @@ func checkoutCheckpoint(opts checkoutOpts, args []string) error { checkoutPath := opts.checkoutPath if checkoutPath == "" { - return checkoutEntireCheckpoint(outputDir, repo, experiment, checkpoint) - } else { - return checkoutFileOrDir(outputDir, checkoutPath, repo, experiment, checkpoint) - } -} - -// checkout all the files from an experiment or checkpoint -func checkoutEntireCheckpoint(outputDir string, repo repository.Repository, experiment *project.Experiment, checkpoint *project.Checkpoint) error { - // Extract the tarfile - experimentFilesExist := true - checkpointFilesExist := true - - if err := repo.GetPathTar(path.Join("experiments", experiment.ID+".tar.gz"), outputDir); err != nil { - // Ignore does not exist errors - if _, ok := err.(*repository.DoesNotExistError); ok { - console.Debug("No experiment data found") - experimentFilesExist = false - } else { - return err - } + return 
proj.CheckoutCheckpoint(checkpoint, experiment, outputDir, false) } else { - console.Info("Copied the files from experiment %s to %q", experiment.ShortID(), filepath.Join(outputDir, experiment.Path)) - } - - // Overlay checkpoint on top of experiment - if checkpoint != nil { - - if err := repo.GetPathTar(path.Join("checkpoints", checkpoint.ID+".tar.gz"), outputDir); err != nil { - if _, ok := err.(*repository.DoesNotExistError); ok { - console.Debug("No checkpoint data found") - checkpointFilesExist = false - } else { - return err - - } - } else { - console.Info("Copied the files from checkpoint %s to %q", checkpoint.ShortID(), filepath.Join(outputDir, checkpoint.Path)) - } - + return proj.CheckoutFileOrDirectory(checkpoint, experiment, outputDir, checkoutPath) } - - if !experimentFilesExist && !checkpointFilesExist { - // Just an experiment, no checkpoints - if checkpoint == nil { - return fmt.Errorf("The experiment %s does not have any files associated with it. You need to pass the 'path' argument to 'init()' to check out files.", experiment.ShortID()) - } - return fmt.Errorf("Neither the experiment %s nor the checkpoint %s has any files associated with it. 
You need to pass the 'path' argument to 'init()' or 'checkpoint()' to check out files.", experiment.ShortID(), checkpoint.ShortID()) - } - - console.Info(`If you want to run this experiment again, this is how it was run: - - ` + experiment.Command + ` -`) - - return nil - -} - -// checkout all the files from an experiment or checkpoint -func checkoutFileOrDir(outputDir string, checkoutPath string, repo repository.Repository, experiment *project.Experiment, checkpoint *project.Checkpoint) error { - // Extract the tarfile - experimentFilesExist := true - checkpointFilesExist := true - - if err := repo.GetPathItemTar(path.Join("experiments", experiment.ID+".tar.gz"), checkoutPath, outputDir); err != nil { - // Ignore does not exist errors - if _, ok := err.(*repository.DoesNotExistError); ok { - console.Debug("No experiment data found") - experimentFilesExist = false - } else { - return err - } - } else { - console.Info("Copied the path %s from experiment %s to %q", checkoutPath, experiment.ShortID(), filepath.Join(outputDir, experiment.Path)) - } - - // Overlay checkpoint on top of experiment - if checkpoint != nil { - - if err := repo.GetPathItemTar(path.Join("checkpoints", checkpoint.ID+".tar.gz"), checkoutPath, outputDir); err != nil { - if _, ok := err.(*repository.DoesNotExistError); ok { - console.Debug("No checkpoint data found") - checkpointFilesExist = false - } else { - return err - - } - } else { - console.Info("Copied the path %s from checkpoint %s to %q", checkoutPath, checkpoint.ShortID(), filepath.Join(outputDir, checkpoint.Path)) - } - - } - - if !experimentFilesExist && !checkpointFilesExist { - // Just an experiment, no checkpoints - if checkpoint == nil { - return fmt.Errorf("The experiment %s does not have the path %s associated with it. 
You need to pass the 'path' argument to 'init()' to check out files.", experiment.ShortID(), checkoutPath) - } - return fmt.Errorf("Neither the experiment %s nor the checkpoint %s has the path %s associated with it. You need to pass the 'path' argument to 'init()' or 'checkpoint()' to check out files.", experiment.ShortID(), checkpoint.ShortID(), checkoutPath) - } - - console.Info(`If you want to run this experiment again, this is how it was run: - - ` + experiment.Command + ` -`) - - return nil - } diff --git a/go/pkg/cli/common.go b/go/pkg/cli/common.go index 26c54fc8..e9eeb8d8 100644 --- a/go/pkg/cli/common.go +++ b/go/pkg/cli/common.go @@ -28,20 +28,31 @@ func addRepositoryURLFlagVar(cmd *cobra.Command, opt *string) { } // getRepositoryURLFromStringOrConfig attempts to get it from passed string from --repository, -// otherwise finds replicate.yaml recursively +// otherwise finds replicate.yaml recursively. +// The project directory is determined by the following logic: +// * If an explicit directory is passed with -D, that is used +// * Else, if repository URL isn't manually passed with -R, the directory of replicate.yaml is used +// * Otherwise, the current working directory is used +// Returns (repositoryURL, projectDir, error) func getRepositoryURLFromStringOrConfig(repositoryURL string) (string, string, error) { + projectDir := global.ProjectDirectory if repositoryURL == "" { - conf, projectDir, err := config.FindConfigInWorkingDir(global.ProjectDirectory) + conf, confProjectDir, err := config.FindConfigInWorkingDir(global.ProjectDirectory) if err != nil { return "", "", err } - return conf.Repository, projectDir, nil + if repositoryURL == "" { + repositoryURL = conf.Repository + } + if global.ProjectDirectory == "" { + projectDir = confProjectDir + } else { + projectDir = global.ProjectDirectory + } } - // if global.ProjectDirectory == "", abs of that is cwd - // FIXME (bfirsh): this does not look up directories for replicate.yaml, so might be the wrong - 
// projectDir. It should probably use return value of FindConfigInWorkingDir. - projectDir, err := filepath.Abs(global.ProjectDirectory) + // abs of "" if cwd + projectDir, err := filepath.Abs(projectDir) if err != nil { return "", "", fmt.Errorf("Failed to determine absolute directory of '%s': %w", global.ProjectDirectory, err) } @@ -71,14 +82,14 @@ func getProjectDir() (string, error) { // getRepository returns the project's repository, with caching if needed // This is not in repository package so we can do user interface stuff around syncing func getRepository(repositoryURL, projectDir string) (repository.Repository, error) { - repo, err := repository.ForURL(repositoryURL) + repo, err := repository.ForURL(repositoryURL, projectDir) if err != nil { return nil, err } // projectDir might be "" if you use --repository option if repository.NeedsCaching(repo) && projectDir != "" { console.Info("Fetching new data from %q...", repo.RootURL()) - repo, err = repository.NewCachedMetadataRepository(repo, projectDir) + repo, err = repository.NewCachedMetadataRepository(projectDir, repo) if err != nil { return nil, err } diff --git a/go/pkg/cli/daemon.go b/go/pkg/cli/daemon.go new file mode 100644 index 00000000..87d8d044 --- /dev/null +++ b/go/pkg/cli/daemon.go @@ -0,0 +1,40 @@ +package cli + +import ( + "github.com/spf13/cobra" + + "github.com/replicate/replicate/go/pkg/project" + "github.com/replicate/replicate/go/pkg/shared" +) + +func NewDaemonCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "replicate-daemon ", + RunE: runDaemon, + } + setPersistentFlags(cmd) + addRepositoryURLFlag(cmd) + return cmd +} + +func runDaemon(cmd *cobra.Command, args []string) error { + socketPath := args[0] + + projectGetter := func() (proj *project.Project, err error) { + repositoryURL, projectDir, err := getRepositoryURLFromFlagOrConfig(cmd) + if err != nil { + return nil, err + } + repo, err := getRepository(repositoryURL, projectDir) + if err != nil { + return nil, err + } + 
proj = project.NewProject(repo, projectDir) + return proj, err + } + + if err := shared.Serve(projectGetter, socketPath); err != nil { + return err + } + return nil +} diff --git a/go/pkg/cli/diff.go b/go/pkg/cli/diff.go index 9919abb9..323b3777 100644 --- a/go/pkg/cli/diff.go +++ b/go/pkg/cli/diff.go @@ -50,7 +50,7 @@ func diffCheckpoints(cmd *cobra.Command, args []string) error { if err != nil { return err } - proj := project.NewProject(repo) + proj := project.NewProject(repo, projectDir) au := getAurora() return printDiff(os.Stdout, au, proj, prefix1, prefix2) } @@ -145,7 +145,7 @@ func printMapDiff(w *tabwriter.Writer, au aurora.Aurora, map1, map2 map[string]s // Returns a map of checkpoint things we want to show in diff func checkpointToMap(checkpoint *project.Checkpoint) map[string]string { return map[string]string{ - "Step": strconv.Itoa(checkpoint.Step), + "Step": strconv.FormatInt(checkpoint.Step, 10), "Created": checkpoint.Created.In(timezone).Format(time.RFC1123), "Path": checkpoint.Path, } diff --git a/go/pkg/cli/diff_test.go b/go/pkg/cli/diff_test.go index 682c5ebe..efa35347 100644 --- a/go/pkg/cli/diff_test.go +++ b/go/pkg/cli/diff_test.go @@ -21,7 +21,7 @@ func TestDiffSameExperiment(t *testing.T) { conf := &config.Config{} repo := createShowTestData(t, workingDir, conf) - proj := project.NewProject(repo) + proj := project.NewProject(repo, workingDir) au := aurora.NewAurora(false) out := new(bytes.Buffer) @@ -59,7 +59,7 @@ func TestDiffDifferentExperiment(t *testing.T) { conf := &config.Config{} repo := createShowTestData(t, workingDir, conf) - proj := project.NewProject(repo) + proj := project.NewProject(repo, workingDir) au := aurora.NewAurora(false) out := new(bytes.Buffer) diff --git a/go/pkg/cli/list/list.go b/go/pkg/cli/list/list.go index 4307a68f..e1fae8f8 100644 --- a/go/pkg/cli/list/list.go +++ b/go/pkg/cli/list/list.go @@ -53,7 +53,7 @@ func (exp *ListExperiment) GetValue(name string) param.Value { } if name == "step" { if 
exp.LatestCheckpoint != nil { - return param.Int(exp.LatestCheckpoint.Step) + return param.Int(int64(exp.LatestCheckpoint.Step)) } return param.Int(0) } @@ -84,7 +84,7 @@ func (exp *ListExperiment) GetValue(name string) param.Value { } func Experiments(repo repository.Repository, format Format, all bool, filters *param.Filters, sorter *param.Sorter) error { - proj := project.NewProject(repo) + proj := project.NewProject(repo, "") listExperiments, err := createListExperiments(proj, filters) if err != nil { return err @@ -209,7 +209,7 @@ func outputTable(experiments []*ListExperiment, all bool) error { latestCheckpoint := "" if exp.LatestCheckpoint != nil { - latestCheckpoint = fmt.Sprintf("%s (step %s)", exp.LatestCheckpoint.ShortID(), strconv.Itoa(exp.LatestCheckpoint.Step)) + latestCheckpoint = fmt.Sprintf("%s (step %s)", exp.LatestCheckpoint.ShortID(), strconv.FormatInt(exp.LatestCheckpoint.Step, 10)) } fmt.Fprintf(tw, "%s\t", latestCheckpoint) @@ -227,7 +227,7 @@ func outputTable(experiments []*ListExperiment, all bool) error { bestCheckpoint := "" if exp.BestCheckpoint != nil { - bestCheckpoint = fmt.Sprintf("%s (step %s)", exp.BestCheckpoint.ShortID(), strconv.Itoa(exp.BestCheckpoint.Step)) + bestCheckpoint = fmt.Sprintf("%s (step %s)", exp.BestCheckpoint.ShortID(), strconv.FormatInt(exp.BestCheckpoint.Step, 10)) } fmt.Fprintf(tw, "%s\t", bestCheckpoint) diff --git a/go/pkg/cli/rm.go b/go/pkg/cli/rm.go index 9d4262fc..85240bb6 100644 --- a/go/pkg/cli/rm.go +++ b/go/pkg/cli/rm.go @@ -46,7 +46,7 @@ func removeExperimentOrCheckpoint(cmd *cobra.Command, prefixes []string) error { if err != nil { return err } - proj := project.NewProject(repo) + proj := project.NewProject(repo, projectDir) if err != nil { return err } diff --git a/go/pkg/cli/show.go b/go/pkg/cli/show.go index a45ef03c..679940b8 100644 --- a/go/pkg/cli/show.go +++ b/go/pkg/cli/show.go @@ -53,7 +53,7 @@ func show(opts showOpts, args []string, out io.Writer) error { if err != nil { return err } - proj 
:= project.NewProject(repo) + proj := project.NewProject(repo, projectDir) result, err := proj.CheckpointOrExperimentFromPrefix(prefix) if err != nil { return err @@ -139,7 +139,7 @@ func showExperiment(au aurora.Aurora, out io.Writer, proj *project.Project, exp fmt.Fprintf(cw, "%s\n", strings.Join(headings, "\t")) for _, checkpoint := range exp.Checkpoints { - columns := []string{checkpoint.ShortID(), strconv.Itoa(checkpoint.Step), console.FormatTime(checkpoint.Created)} + columns := []string{checkpoint.ShortID(), strconv.FormatInt(checkpoint.Step, 10), console.FormatTime(checkpoint.Created)} for _, label := range labelNames { val := checkpoint.Metrics[label] s := val.ShortString(10, 5) diff --git a/go/pkg/cli/show_test.go b/go/pkg/cli/show_test.go index 1dda0943..6faee5b9 100644 --- a/go/pkg/cli/show_test.go +++ b/go/pkg/cli/show_test.go @@ -125,7 +125,7 @@ func TestShowCheckpoint(t *testing.T) { conf := &config.Config{} repo := createShowTestData(t, workingDir, conf) - proj := project.NewProject(repo) + proj := project.NewProject(repo, workingDir) result, err := proj.CheckpointOrExperimentFromPrefix("3cc") require.NoError(t, err) require.NotNil(t, result.Checkpoint) @@ -187,7 +187,7 @@ func TestShowExperiment(t *testing.T) { conf := &config.Config{} repo := createShowTestData(t, workingDir, conf) - proj := project.NewProject(repo) + proj := project.NewProject(repo, workingDir) result, err := proj.CheckpointOrExperimentFromPrefix("1eee") require.NoError(t, err) require.NotNil(t, result.Experiment) diff --git a/go/pkg/config/load.go b/go/pkg/config/load.go index 18b919f6..691d6538 100644 --- a/go/pkg/config/load.go +++ b/go/pkg/config/load.go @@ -12,6 +12,7 @@ import ( "github.com/ghodss/yaml" "github.com/replicate/replicate/go/pkg/console" + "github.com/replicate/replicate/go/pkg/errors" "github.com/replicate/replicate/go/pkg/files" "github.com/replicate/replicate/go/pkg/global" ) @@ -19,19 +20,6 @@ import ( const maxSearchDepth = 100 const 
deprecatedRepositoryDir = ".replicate/storage" -type configNotFoundError struct { - message string -} - -func (e *configNotFoundError) Error() string { - return e.message + ` - -You must either create a replicate.yaml configuration file, or explicitly pass the arguments 'repository' and 'directory' to replicate.Project(). - -For more information, see https://replicate.ai/docs/reference/python""" -` -} - // FindConfigInWorkingDir searches working directory and any parent directories // for replicate.yaml (or replicate.yml) and loads it. // @@ -43,8 +31,7 @@ func FindConfigInWorkingDir(overrideDir string) (conf *Config, projectDir string if overrideDir != "" { conf, err := LoadConfig(path.Join(overrideDir, global.ConfigFilenames[0])) if err != nil { - if _, ok := err.(*configNotFoundError); ok { - + if errors.IsConfigNotFound(err) { // Try to locate replicate.yml conf, err := LoadConfig(path.Join(overrideDir, global.ConfigFilenames[1])) if err != nil { @@ -97,7 +84,7 @@ func LoadConfig(configPath string) (conf *Config, err error) { text, err := ioutil.ReadFile(configPath) if err != nil { if os.IsNotExist(err) { - return nil, &configNotFoundError{fmt.Sprintf("The config path does not exist: %s", configPath)} + return nil, errors.ConfigNotFound("The config path does not exist: " + configPath) } return nil, fmt.Errorf("Failed to read config file '%s': %w", configPath, err) } @@ -174,10 +161,10 @@ func FindConfigPath(startFolder string) (configPath string, deprecatedRepository if folder == "/" { // These error messages aren't used anywhere, but I've left them in in case this function is used elsewhere in the future - return "", "", &configNotFoundError{message: fmt.Sprintf("%s not found in %s (or in any parent directories", global.ConfigFilenames[0], startFolder)} + return "", "", errors.ConfigNotFound(fmt.Sprintf("%s not found in %s (or in any parent directories", global.ConfigFilenames[0], startFolder)) } folder = filepath.Dir(folder) } - return "", "", 
&configNotFoundError{message: fmt.Sprintf("%s not found, recursive reached max depth", global.ConfigFilenames[0])} + return "", "", errors.ConfigNotFound(fmt.Sprintf("%s not found, recursive reached max depth", global.ConfigFilenames[0])) } diff --git a/go/pkg/errors/errors.go b/go/pkg/errors/errors.go new file mode 100644 index 00000000..42c1bf1d --- /dev/null +++ b/go/pkg/errors/errors.go @@ -0,0 +1,87 @@ +package errors + +import ( + "fmt" +) + +const ( + CodeDoesNotExist = "DOES_NOT_EXIST" + CodeReadError = "READ_ERROR" + CodeWriteError = "WRITE_ERROR" + CodeRepositoryConfigurationError = "REPOSITORY_CONFIGURATION_ERROR" + CodeIncompatibleRepositoryVersion = "INCOMPATIBLE_REPOSITORY_VERSION" + CodeCorruptedRepositorySpec = "CORRUPTED_REPOSITORY_SPEC" + CodeConfigNotFound = "CONFIG_NOT_FOUND" +) + +// TODO: support wrapping https://blog.golang.org/go1.13-errors +type CodedError interface { + Code() string +} + +type codedError struct { + code string + msg string +} + +func (e *codedError) Error() string { + return e.msg +} + +func (e *codedError) Code() string { + return e.code +} + +func IsDoesNotExist(err error) bool { + return Code(err) == CodeDoesNotExist +} + +func IsConfigNotFound(err error) bool { + return Code(err) == CodeConfigNotFound +} + +func DoesNotExist(msg string) error { return &codedError{code: CodeDoesNotExist, msg: msg} } +func ReadError(msg string) error { return &codedError{code: CodeReadError, msg: msg} } +func WriteError(msg string) error { return &codedError{code: CodeWriteError, msg: msg} } +func RepositoryConfigurationError(msg string) error { + return &codedError{code: CodeRepositoryConfigurationError, msg: msg} +} + +func ConfigNotFound(msg string) error { + return &codedError{ + code: CodeConfigNotFound, + msg: msg + ` + +You must either create a replicate.yaml configuration file, or explicitly pass the arguments 'repository' and 'directory' to replicate.Project(). 
+ +For more information, see https://replicate.ai/docs/reference/python""" +`, + } +} + +func IncompatibleRepositoryVersion(rootURL string) error { + return &codedError{ + code: CodeIncompatibleRepositoryVersion, + msg: `The repository at ` + rootURL + ` is using a newer storage mechanism which is incompatible with your version of Replicate. + +To upgrade, run: +pip install --upgrade replicate`, + } +} + +func CorruptedRepositorySpec(rootURL string, specPath string, err error) error { + return &codedError{ + code: CodeCorruptedRepositorySpec, + msg: fmt.Sprintf(`The project spec file at %s/%s is corrupted (%v). + +You can manually edit it with the format {"version": VERSION}, +where VERSION is an integer.`, rootURL, specPath, err), + } +} + +func Code(err error) string { + if cerr, ok := err.(CodedError); ok { + return cerr.Code() + } + return "" +} diff --git a/go/pkg/param/value.go b/go/pkg/param/value.go index 97c09664..5a16ab4e 100644 --- a/go/pkg/param/value.go +++ b/go/pkg/param/value.go @@ -18,12 +18,18 @@ const ( TypeBool Type = "bool" TypeObject Type = "object" TypeNone Type = "none" + + // hack in nan, +inf and -inf since json doesn't support + // them natively. + JsonNaN = `"[NaN]"` + JsonPositiveInfinity = `"[+Infinity]"` + JsonNegativeInfinity = `"[-Infinity]"` ) // TODO(bfirsh): could complexity be reduced here if it were implemented as interface{}? 
type Value struct { - intVal *int + intVal *int64 floatVal *float64 stringVal *string boolVal *bool @@ -39,6 +45,15 @@ func (v Value) MarshalJSON() ([]byte, error) { case v.intVal != nil: return json.Marshal(v.intVal) case v.floatVal != nil: + if math.IsNaN(*v.floatVal) { + return []byte(JsonNaN), nil + } + if math.IsInf(*v.floatVal, 1) { + return []byte(JsonPositiveInfinity), nil + } + if math.IsInf(*v.floatVal, -1) { + return []byte(JsonNegativeInfinity), nil + } return json.Marshal(v.floatVal) case v.stringVal != nil: return json.Marshal(v.stringVal) @@ -55,7 +70,7 @@ func (v Value) MarshalJSON() ([]byte, error) { func (v *Value) UnmarshalJSON(data []byte) error { // FIXME(bfirsh): this might be more robust if it unmarshalled to interface{} // then we used reflect? the error returned from json.Unmarshal might be other things - if i := new(int); json.Unmarshal(data, i) == nil { + if i := new(int64); json.Unmarshal(data, i) == nil { v.intVal = i return nil } @@ -71,6 +86,21 @@ func (v *Value) UnmarshalJSON(data []byte) error { v.isNone = true return nil } + if string(data) == JsonNaN { + f := math.NaN() + v.floatVal = &f + return nil + } + if string(data) == JsonPositiveInfinity { + f := math.Inf(1) + v.floatVal = &f + return nil + } + if string(data) == JsonNegativeInfinity { + f := math.Inf(-1) + v.floatVal = &f + return nil + } if s := new(string); json.Unmarshal(data, s) == nil { v.stringVal = s return nil @@ -83,11 +113,12 @@ func (v *Value) UnmarshalJSON(data []byte) error { func ParseFromString(s string) Value { data := []byte(s) v := Value{} + if s == "null" || s == "None" { v.isNone = true return v } - if i := new(int); json.Unmarshal(data, i) == nil { + if i := new(int64); json.Unmarshal(data, i) == nil { v.intVal = i return v } @@ -190,7 +221,7 @@ func (v Value) BoolVal() bool { return *v.boolVal } -func (v Value) IntVal() int { +func (v Value) IntVal() int64 { if v.Type() != TypeInt { panic(fmt.Sprintf("Can't use %s as int", v)) } @@ -369,7 +400,7 @@ 
func Bool(v bool) Value { return Value{boolVal: &v} } -func Int(v int) Value { +func Int(v int64) Value { return Value{intVal: &v} } diff --git a/go/pkg/project/checkout.go b/go/pkg/project/checkout.go new file mode 100644 index 00000000..9cd79670 --- /dev/null +++ b/go/pkg/project/checkout.go @@ -0,0 +1,116 @@ +package project + +import ( + "fmt" + "path" + "path/filepath" + + "github.com/replicate/replicate/go/pkg/console" + "github.com/replicate/replicate/go/pkg/errors" +) + +func (p *Project) CheckoutCheckpoint(checkpoint *Checkpoint, experiment *Experiment, outputDir string, quiet bool) error { + experimentFilesExist := true + checkpointFilesExist := true + + if err := p.repository.GetPathTar(path.Join("experiments", experiment.ID+".tar.gz"), outputDir); err != nil { + // Ignore does not exist errors + if errors.IsDoesNotExist(err) { + console.Debug("No experiment data found") + experimentFilesExist = false + } else { + return err + } + } else { + if !quiet { + console.Info("Copied the files from experiment %s to %q", experiment.ShortID(), filepath.Join(outputDir, experiment.Path)) + } + } + + // Overlay checkpoint on top of experiment + if checkpoint != nil { + + if err := p.repository.GetPathTar(path.Join("checkpoints", checkpoint.ID+".tar.gz"), outputDir); err != nil { + if errors.IsDoesNotExist(err) { + console.Debug("No checkpoint data found") + checkpointFilesExist = false + } else { + return err + + } + } else { + if !quiet { + console.Info("Copied the files from checkpoint %s to %q", checkpoint.ShortID(), filepath.Join(outputDir, checkpoint.Path)) + } + } + + } + + if !experimentFilesExist && !checkpointFilesExist { + // Just an experiment, no checkpoints + if checkpoint == nil { + return fmt.Errorf("The experiment %s does not have any files associated with it. 
You need to pass the 'path' argument to 'init()' to check out files.", experiment.ShortID()) + } + return errors.DoesNotExist(fmt.Sprintf("Neither the experiment %s nor the checkpoint %s has any files associated with it. You need to pass the 'path' argument to 'init()' or 'checkpoint()' to check out files.", experiment.ShortID(), checkpoint.ShortID())) + } + + if !quiet { + console.Info(`If you want to run this experiment again, this is how it was run: + + ` + experiment.Command + ` +`) + } + + return nil +} + +// checkout all the files from an experiment or checkpoint +func (p *Project) CheckoutFileOrDirectory(checkpoint *Checkpoint, experiment *Experiment, outputDir string, checkoutPath string) error { + // Extract the tarfile + experimentFilesExist := true + checkpointFilesExist := true + + if err := p.repository.GetPathItemTar(filepath.Join("experiments", experiment.ID+".tar.gz"), checkoutPath, outputDir); err != nil { + // Ignore does not exist errors + if errors.IsDoesNotExist(err) { + console.Debug("No experiment data found") + experimentFilesExist = false + } else { + return err + } + } else { + console.Info("Copied the path %s from experiment %s to %q", checkoutPath, experiment.ShortID(), filepath.Join(outputDir, experiment.Path)) + } + + // Overlay checkpoint on top of experiment + if checkpoint != nil { + + if err := p.repository.GetPathItemTar(filepath.Join("checkpoints", checkpoint.ID+".tar.gz"), checkoutPath, outputDir); err != nil { + if errors.IsDoesNotExist(err) { + console.Debug("No checkpoint data found") + checkpointFilesExist = false + } else { + return err + + } + } else { + console.Info("Copied the path %s from checkpoint %s to %q", checkoutPath, checkpoint.ShortID(), filepath.Join(outputDir, checkpoint.Path)) + } + + } + + if !experimentFilesExist && !checkpointFilesExist { + // Just an experiment, no checkpoints + if checkpoint == nil { + return fmt.Errorf("The experiment %s does not have the path %s associated with it. 
You need to pass the 'path' argument to 'init()' to check out files.", experiment.ShortID(), checkoutPath) + } + return fmt.Errorf("Neither the experiment %s nor the checkpoint %s has the path %s associated with it. You need to pass the 'path' argument to 'init()' or 'checkpoint()' to check out files.", experiment.ShortID(), checkpoint.ShortID(), checkoutPath) + } + + console.Info(`If you want to run this experiment again, this is how it was run: + + ` + experiment.Command + ` +`) + + return nil +} diff --git a/go/pkg/project/checkpoint.go b/go/pkg/project/checkpoint.go index a55e91cf..45a81f67 100644 --- a/go/pkg/project/checkpoint.go +++ b/go/pkg/project/checkpoint.go @@ -25,7 +25,7 @@ type Checkpoint struct { ID string `json:"id"` Created time.Time `json:"created"` Metrics param.ValueMap `json:"metrics"` - Step int `json:"step"` + Step int64 `json:"step"` Path string `json:"path"` PrimaryMetric *PrimaryMetric `json:"primary_metric"` } diff --git a/go/pkg/project/heartbeat.go b/go/pkg/project/heartbeat.go index be208484..da0483d7 100644 --- a/go/pkg/project/heartbeat.go +++ b/go/pkg/project/heartbeat.go @@ -34,6 +34,10 @@ func CreateHeartbeat(repo repository.Repository, experimentID string, t time.Tim return repo.Put(path.Join("metadata", "heartbeats", experimentID+".json"), data) } +func DeleteHeartbeat(repo repository.Repository, experimentID string) error { + return repo.Delete(path.Join("metadata", "heartbeats", experimentID+".json")) +} + func listHeartbeats(repo repository.Repository) ([]*Heartbeat, error) { paths, err := repo.List("metadata/heartbeats/") if err != nil { diff --git a/go/pkg/project/project.go b/go/pkg/project/project.go index 6adb63d2..0ca07689 100644 --- a/go/pkg/project/project.go +++ b/go/pkg/project/project.go @@ -3,24 +3,35 @@ package project import ( "encoding/json" "fmt" + "math/rand" + "os/user" "strings" + "time" + "github.com/replicate/replicate/go/pkg/config" "github.com/replicate/replicate/go/pkg/console" + 
"github.com/replicate/replicate/go/pkg/errors" + "github.com/replicate/replicate/go/pkg/global" + "github.com/replicate/replicate/go/pkg/param" "github.com/replicate/replicate/go/pkg/repository" ) +const IDLength = 64 + // Project is essentially a data access object for retrieving // metadata objects type Project struct { repository repository.Repository + directory string experimentsByID map[string]*Experiment heartbeatsByExpID map[string]*Heartbeat hasLoaded bool } -func NewProject(repo repository.Repository) *Project { +func NewProject(repo repository.Repository, directory string) *Project { return &Project{ repository: repo, + directory: directory, hasLoaded: false, } } @@ -37,6 +48,8 @@ func (p *Project) Experiments() ([]*Experiment, error) { return experiments, nil } +// ExperimentIsRunning returns true if an experiment is still running +// (i.e. the heartbeat has beat in the last n seconds). func (p *Project) ExperimentIsRunning(experimentID string) (bool, error) { if err := p.ensureLoaded(); err != nil { return false, err @@ -50,6 +63,77 @@ func (p *Project) ExperimentIsRunning(experimentID string) (bool, error) { return heartbeat.IsRunning(), nil } +// ExperimentFromPrefix returns an experiment that matches a given ID prefix. +func (p *Project) ExperimentFromPrefix(prefix string) (*Experiment, error) { + if err := p.ensureLoaded(); err != nil { + return nil, err + } + + matches := []*Experiment{} + + for id := range p.experimentsByID { + exp := p.experimentsByID[id] + if strings.HasPrefix(id, prefix) { + matches = append(matches, exp) + } + } + + if len(matches) == 0 { + return nil, errors.DoesNotExist("Experiment not found: " + prefix) + } + if len(matches) > 1 { + return nil, fmt.Errorf("Prefix is ambiguous: %s (%d matching experiments)", prefix, len(matches)) + } + return matches[0], nil +} + +// ExperimentByID returns an experiment that matches a given ID. 
+func (p *Project) ExperimentByID(id string) (*Experiment, error) { + if err := p.ensureLoaded(); err != nil { + return nil, err + } + if exp, ok := p.experimentsByID[id]; ok { + return exp, nil + } + return nil, fmt.Errorf("Experiment not found: %s", id) +} + +// CheckpointFromPrefix returns an experiment that matches a given ID prefix. +func (p *Project) CheckpointFromPrefix(prefix string) (*Checkpoint, *Experiment, error) { + if err := p.ensureLoaded(); err != nil { + return nil, nil, err + } + + type match struct { + checkpoint *Checkpoint + experiment *Experiment + } + + matches := []match{} + + for id := range p.experimentsByID { + exp := p.experimentsByID[id] + for _, checkpoint := range exp.Checkpoints { + if strings.HasPrefix(checkpoint.ID, prefix) { + matches = append(matches, match{ + checkpoint: checkpoint, + experiment: exp, + }) + } + } + } + + if len(matches) == 0 { + return nil, nil, fmt.Errorf("Checkpoint not found: %s", prefix) + } + if len(matches) > 1 { + return nil, nil, fmt.Errorf("Prefix is ambiguous: %s (%d matching checkpoints)", prefix, len(matches)) + } + + m := matches[0] + return m.checkpoint, m.experiment, nil +} + type CheckpointOrExperiment struct { Checkpoint *Checkpoint Experiment *Experiment @@ -86,29 +170,180 @@ func (p *Project) CheckpointOrExperimentFromPrefix(prefix string) (*CheckpointOr return matches[0], nil } -func (p *Project) DeleteCheckpoint(com *Checkpoint) error { - if err := p.repository.Delete(com.StorageTarPath()); err != nil { - console.Warn("Failed to delete checkpoint storage directory %s: %s", com.StorageTarPath(), err) +func (p *Project) DeleteCheckpoint(chk *Checkpoint) error { + if err := p.repository.Delete(chk.StorageTarPath()); err != nil { + console.Warn("Failed to delete checkpoint storage directory %s: %s", chk.StorageTarPath(), err) } + p.invalidateCache() return nil } func (p *Project) DeleteExperiment(exp *Experiment) error { + console.Debug("Deleting experiment: %s", exp.ShortID()) if err := 
p.repository.Delete(exp.HeartbeatPath()); err != nil { console.Warn("Failed to delete heartbeat file %s: %s", exp.HeartbeatPath(), err) } if err := p.repository.Delete(exp.StorageTarPath()); err != nil { - console.Warn("Failed to delete checkpoint storage directory %s: %s", exp.StorageTarPath(), err) + console.Warn("Failed to delete experiment storage directory %s: %s", exp.StorageTarPath(), err) } if err := p.repository.Delete(exp.MetadataPath()); err != nil { console.Warn("Failed to delete experiment metadata file %s: %s", exp.MetadataPath(), err) } + p.invalidateCache() + return nil +} + +type CreateExperimentArgs struct { + Path string + Command string + Params map[string]param.Value + PythonPackages map[string]string +} + +func (p *Project) CreateExperiment(args CreateExperimentArgs, async bool, workChan chan func() error, quiet bool) (*Experiment, error) { + spec, err := repository.LoadSpec(p.repository) + if err != nil { + return nil, err + } + if spec == nil { + if err := repository.WriteSpec(p.repository); err != nil { + return nil, err + } + } else if spec.Version > repository.Version { + return nil, errors.IncompatibleRepositoryVersion(p.repository.RootURL()) + } + + host := "" // currently disabled and unused + currentUser, err := user.Current() + username := "" + if err == nil { + username = currentUser.Username + } else { + console.Warn("Failed to determine username: %s", err) + } + conf := &config.Config{Repository: p.repository.RootURL()} + + exp := &Experiment{ + ID: generateRandomID(), + Created: time.Now().UTC(), + Params: args.Params, + Host: host, + User: username, + Config: conf, + Command: args.Command, + Path: args.Path, + PythonPackages: args.PythonPackages, + ReplicateVersion: global.Version, + } + + // save json synchronously to uncover repository write issues + if _, err := p.SaveExperiment(exp, false); err != nil { + if !quiet { + console.Info("Creating experiment %s", exp.ShortID()) + } + return nil, err + } + + if !quiet { + 
console.Info("Creating experiment %s, copying '%s' to '%s'...", exp.ShortID(), exp.Path, p.repository.RootURL()) + } + + work := func() error { return nil } + if exp.Path != "" { + work = func() error { + if err := p.repository.PutPathTar(p.directory, exp.StorageTarPath(), exp.Path); err != nil { + return err + } + return nil + } + } + + if async { + workChan <- work + } else { + if err := work(); err != nil { + return nil, err + } + } + return exp, nil +} + +type CreateCheckpointArgs struct { + Path string + Step int64 + Metrics map[string]param.Value + PrimaryMetric *PrimaryMetric +} + +func (p *Project) CreateCheckpoint(args CreateCheckpointArgs, async bool, workChan chan func() error, quiet bool) (*Checkpoint, error) { + chk := &Checkpoint{ + ID: generateRandomID(), + Created: time.Now().UTC(), + Metrics: args.Metrics, + Step: args.Step, + Path: args.Path, + PrimaryMetric: args.PrimaryMetric, + } + + // if path is empty (i.e. it was None in python), just return + // the checkpoint without saving anything + if chk.Path == "" { + if !quiet { + console.Info("Creating checkpoint %s", chk.ShortID()) + } + return chk, nil + } + + if !quiet { + console.Info("Creating checkpoint %s, copying '%s' to '%s'...", chk.ShortID(), chk.Path, p.repository.RootURL()) + } + + work := func() error { + if err := p.repository.PutPathTar(p.directory, chk.StorageTarPath(), chk.Path); err != nil { + return err + } + return nil + } + if async { + workChan <- work + } else { + if err := work(); err != nil { + return nil, err + } + } + + return chk, nil +} + +func (p *Project) SaveExperiment(exp *Experiment, quiet bool) (*Experiment, error) { + // TODO(andreas): use quiet flag + if err := exp.Save(p.repository); err != nil { + return nil, err + } + p.invalidateCache() + return exp, nil +} + +func (p *Project) RefreshHeartbeat(experimentID string) error { + return CreateHeartbeat(p.repository, experimentID, time.Now().UTC()) +} + +func (p *Project) StopExperiment(experimentID string) error 
{ + if err := DeleteHeartbeat(p.repository, experimentID); err != nil { + return err + } + p.invalidateCache() return nil } +func (p *Project) invalidateCache() { + p.hasLoaded = false +} + // ensureLoaded eagerly loads all the metadata for this project. // This is highly inefficient, see https://github.com/replicate/replicate/issues/305 func (p *Project) ensureLoaded() error { + // TODO(andreas): 5(?) second caching instead if p.hasLoaded { return nil } @@ -147,3 +382,20 @@ func loadFromPath(repo repository.Repository, path string, obj interface{}) erro } return nil } + +// TODO(andreas): even though this random generator isn't affected by +// python's random seed, it might still be a good idea to include a +// timestamp or something else to ensure uniqueness in case you +// use the Go API directly. +func generateRandomID() string { + chars := []rune("0123456789abcdef") + var b strings.Builder + for i := 0; i < IDLength; i++ { + _, err := b.WriteRune(chars[rand.Intn(len(chars))]) + if err != nil { + // should never happen! 
+ panic(err) + } + } + return b.String() +} diff --git a/go/pkg/repository/cached.go b/go/pkg/repository/cached.go index 681c863a..8858a277 100644 --- a/go/pkg/repository/cached.go +++ b/go/pkg/repository/cached.go @@ -1,7 +1,6 @@ package repository import ( - "path" "strings" "github.com/replicate/replicate/go/pkg/console" @@ -22,7 +21,7 @@ type CachedRepository struct { isSynced bool } -func NewCachedRepository(repo Repository, cachePrefix string, cacheDir string) (*CachedRepository, error) { +func NewCachedRepository(repo Repository, cachePrefix string, projectDir string, cacheDir string) (*CachedRepository, error) { // This doesn't actually return an error, but catch in case of future errors cacheRepository, err := NewDiskRepository(cacheDir) if err != nil { @@ -39,8 +38,8 @@ func NewCachedRepository(repo Repository, cachePrefix string, cacheDir string) ( // NewCachedMetadataRepository returns a CachedRepository that caches the metadata/ path in // .replicate/metadata-cache in a source dir -func NewCachedMetadataRepository(repo Repository, projectDir string) (*CachedRepository, error) { - return NewCachedRepository(repo, "metadata", path.Join(projectDir, ".replicate/metadata-cache")) +func NewCachedMetadataRepository(projectDir string, repo Repository) (*CachedRepository, error) { + return NewCachedRepository(repo, "metadata", projectDir, ".replicate/metadata-cache") } func (s *CachedRepository) Get(p string) ([]byte, error) { diff --git a/go/pkg/repository/disk.go b/go/pkg/repository/disk.go index 78b7756d..18dff0cd 100644 --- a/go/pkg/repository/disk.go +++ b/go/pkg/repository/disk.go @@ -6,12 +6,13 @@ import ( "io" "io/ioutil" "os" - "path" + pathpkg "path" "path/filepath" "strings" "github.com/otiai10/copy" + "github.com/replicate/replicate/go/pkg/errors" "github.com/replicate/replicate/go/pkg/files" ) @@ -30,18 +31,18 @@ func (s *DiskRepository) RootURL() string { } // Get data at path -func (s *DiskRepository) Get(p string) ([]byte, error) { - data, err 
:= ioutil.ReadFile(path.Join(s.rootDir, p)) +func (s *DiskRepository) Get(path string) ([]byte, error) { + data, err := ioutil.ReadFile(pathpkg.Join(s.rootDir, path)) if err != nil && os.IsNotExist(err) { - return nil, &DoesNotExistError{msg: "Get: path does not exist: " + p} + return nil, errors.DoesNotExist(fmt.Sprintf("Get: path does not exist: %v", path)) } return data, err } // GetPath recursively copies repoDir to localDir func (s *DiskRepository) GetPath(repoDir string, localDir string) error { - if err := copy.Copy(path.Join(s.rootDir, repoDir), localDir); err != nil { - return fmt.Errorf("Failed to copy directory from %s to %s: %w", repoDir, localDir, err) + if err := copy.Copy(pathpkg.Join(s.rootDir, repoDir), localDir); err != nil { + return errors.ReadError(fmt.Sprintf("Failed to copy directory from %s to %s: %v", repoDir, localDir, err)) } return nil } @@ -50,53 +51,59 @@ func (s *DiskRepository) GetPath(repoDir string, localDir string) error { // // See repository.go for full documentation. 
func (s *DiskRepository) GetPathTar(tarPath, localPath string) error { - fullTarPath := path.Join(s.rootDir, tarPath) + fullTarPath := pathpkg.Join(s.rootDir, tarPath) exists, err := files.FileExists(fullTarPath) if err != nil { return err } if !exists { - return &DoesNotExistError{msg: "GetPathTar: does not exist: " + fullTarPath} + return errors.DoesNotExist(fmt.Sprintf("Path does not exist: " + fullTarPath)) } - return extractTar(fullTarPath, localPath) + if err := extractTar(fullTarPath, localPath); err != nil { + return err + } + return nil } func (s *DiskRepository) GetPathItemTar(tarPath, itemPath, localPath string) error { - fullTarPath := path.Join(s.rootDir, tarPath) + fullTarPath := pathpkg.Join(s.rootDir, tarPath) exists, err := files.FileExists(fullTarPath) if err != nil { return err } if !exists { - return &DoesNotExistError{msg: "GetPathItemTar: does not exist: " + fullTarPath} + return errors.DoesNotExist("Path does not exist: " + fullTarPath) } return extractTarItem(fullTarPath, itemPath, localPath) } // Put data at path -func (s *DiskRepository) Put(p string, data []byte) error { - fullPath := path.Join(s.rootDir, p) +func (s *DiskRepository) Put(path string, data []byte) error { + fullPath := pathpkg.Join(s.rootDir, path) err := os.MkdirAll(filepath.Dir(fullPath), 0755) if err != nil { - return err + return errors.WriteError(err.Error()) } - return ioutil.WriteFile(fullPath, data, 0644) + if err := ioutil.WriteFile(fullPath, data, 0644); err != nil { + return errors.WriteError(err.Error()) + } + return nil } // PutPath recursively puts the local `localPath` directory into path `repoPath` in the repository func (s *DiskRepository) PutPath(localPath string, repoPath string) error { files, err := getListOfFilesToPut(localPath, repoPath) if err != nil { - return err + return errors.WriteError(err.Error()) } for _, file := range files { data, err := ioutil.ReadFile(file.Source) if err != nil { - return err + return errors.WriteError(err.Error()) } err 
= s.Put(file.Dest, data) if err != nil { - return err + return errors.WriteError(err.Error()) } } return nil @@ -108,18 +115,18 @@ func (s *DiskRepository) PutPath(localPath string, repoPath string) error { // See repository.go for full documentation. func (s *DiskRepository) PutPathTar(localPath, tarPath, includePath string) error { if !strings.HasSuffix(tarPath, ".tar.gz") { - return fmt.Errorf("PutPathTar: tarPath must end with .tar.gz") + return errors.WriteError("PutPathTar: tarPath must end with .tar.gz") } - fullPath := path.Join(s.rootDir, tarPath) + fullPath := pathpkg.Join(s.rootDir, tarPath) err := os.MkdirAll(filepath.Dir(fullPath), 0755) if err != nil { - return err + return errors.WriteError(err.Error()) } tarFile, err := os.Create(fullPath) if err != nil { - return err + return errors.WriteError(err.Error()) } defer tarFile.Close() @@ -128,14 +135,17 @@ func (s *DiskRepository) PutPathTar(localPath, tarPath, includePath string) erro } // Explicitly call Close() on success to capture error - return tarFile.Close() + if err := tarFile.Close(); err != nil { + return errors.WriteError(err.Error()) + } + return nil } // Delete deletes path. If path is a directory, it recursively deletes // all everything under path func (s *DiskRepository) Delete(pathToDelete string) error { - if err := os.RemoveAll(path.Join(s.rootDir, pathToDelete)); err != nil { - return fmt.Errorf("Failed to delete %s/%s: %w", s.rootDir, pathToDelete, err) + if err := os.RemoveAll(pathpkg.Join(s.rootDir, pathToDelete)); err != nil { + return errors.WriteError(fmt.Sprintf("Failed to delete %s/%s: %v", s.rootDir, pathToDelete, err)) } return nil } @@ -145,31 +155,31 @@ func (s *DiskRepository) Delete(pathToDelete string) error { // Returns a list of paths, prefixed with the given path, that can be passed straight to Get(). // Directories are not listed. // If path does not exist, an empty list will be returned. 
-func (s *DiskRepository) List(p string) ([]string, error) { - files, err := ioutil.ReadDir(path.Join(s.rootDir, p)) +func (s *DiskRepository) List(path string) ([]string, error) { + files, err := ioutil.ReadDir(pathpkg.Join(s.rootDir, path)) if err != nil { if os.IsNotExist(err) { return []string{}, nil } - return nil, err + return nil, errors.ReadError(err.Error()) } result := []string{} for _, f := range files { if !f.IsDir() { - result = append(result, path.Join(p, f.Name())) + result = append(result, pathpkg.Join(path, f.Name())) } } return result, nil } func (s *DiskRepository) ListTarFile(tarPath string) ([]string, error) { - fullTarPath := path.Join(s.rootDir, tarPath) + fullTarPath := pathpkg.Join(s.rootDir, tarPath) exists, err := files.FileExists(fullTarPath) if err != nil { return nil, err } if !exists { - return nil, &DoesNotExistError{msg: "Path does not exist: " + fullTarPath} + return nil, errors.DoesNotExist("Path does not exist: " + fullTarPath) } files, err := getListOfFilesInTar(fullTarPath) @@ -186,7 +196,7 @@ func (s *DiskRepository) ListTarFile(tarPath string) ([]string, error) { } func (s *DiskRepository) ListRecursive(results chan<- ListResult, folder string) { - err := filepath.Walk(path.Join(s.rootDir, folder), func(path string, info os.FileInfo, err error) error { + err := filepath.Walk(pathpkg.Join(s.rootDir, folder), func(path string, info os.FileInfo, err error) error { if err != nil { return err } @@ -211,13 +221,13 @@ func (s *DiskRepository) ListRecursive(results chan<- ListResult, folder string) close(results) return } - results <- ListResult{Error: err} + results <- ListResult{Error: errors.ReadError(err.Error())} } close(results) } func (s *DiskRepository) MatchFilenamesRecursive(results chan<- ListResult, folder string, filename string) { - err := filepath.Walk(path.Join(s.rootDir, folder), func(path string, info os.FileInfo, err error) error { + err := filepath.Walk(pathpkg.Join(s.rootDir, folder), func(path string, info 
os.FileInfo, err error) error { if err != nil { return err } @@ -238,7 +248,7 @@ func (s *DiskRepository) MatchFilenamesRecursive(results chan<- ListResult, fold return } - results <- ListResult{Error: err} + results <- ListResult{Error: errors.ReadError(err.Error())} } close(results) } diff --git a/go/pkg/repository/disk_test.go b/go/pkg/repository/disk_test.go index 8ff5790d..f01b6c5a 100644 --- a/go/pkg/repository/disk_test.go +++ b/go/pkg/repository/disk_test.go @@ -9,6 +9,7 @@ import ( "github.com/stretchr/testify/require" + "github.com/replicate/replicate/go/pkg/errors" "github.com/replicate/replicate/go/pkg/files" ) @@ -24,7 +25,7 @@ func TestDiskRepositoryGet(t *testing.T) { require.NoError(t, err) _, err = repository.Get("does-not-exist") - require.IsType(t, &DoesNotExistError{}, err) + require.True(t, errors.IsDoesNotExist(err)) content, err := repository.Get("some-file") require.NoError(t, err) @@ -42,7 +43,7 @@ func TestDiskGetPathTar(t *testing.T) { tmpDir, err := files.TempDir("test") require.NoError(t, err) err = repository.GetPathTar("does-not-exist.tar.gz", tmpDir) - require.IsType(t, &DoesNotExistError{}, err) + require.True(t, errors.IsDoesNotExist(err)) } func TestDiskGetPathItemTar(t *testing.T) { @@ -101,7 +102,7 @@ func TestDiskGetPathItemTar(t *testing.T) { // Extract a file that does not exist err = repository.GetPathItemTar("temp.tar.gz", "does-not-exist.txt", tmpDir) - require.IsType(t, &DoesNotExistError{}, err) + require.True(t, errors.IsDoesNotExist(err)) } func TestDiskRepositoryPut(t *testing.T) { diff --git a/go/pkg/repository/errors.go b/go/pkg/repository/errors.go deleted file mode 100644 index 2ecb2f07..00000000 --- a/go/pkg/repository/errors.go +++ /dev/null @@ -1,9 +0,0 @@ -package repository - -type DoesNotExistError struct { - msg string -} - -func (e *DoesNotExistError) Error() string { - return e.msg -} diff --git a/go/pkg/repository/gcs.go b/go/pkg/repository/gcs.go index af90491e..e69888a8 100644 --- 
a/go/pkg/repository/gcs.go +++ b/go/pkg/repository/gcs.go @@ -17,6 +17,7 @@ import ( "github.com/replicate/replicate/go/pkg/concurrency" "github.com/replicate/replicate/go/pkg/console" + "github.com/replicate/replicate/go/pkg/errors" "github.com/replicate/replicate/go/pkg/files" ) @@ -33,13 +34,13 @@ func NewGCSRepository(bucket, root string) (*GCSRepository, error) { if applicationCredentialsJSON != "" { jwtConfig, err := google.JWTConfigFromJSON([]byte(applicationCredentialsJSON), storage.ScopeReadWrite) if err != nil { - return nil, err + return nil, errors.RepositoryConfigurationError(err.Error()) } options = append(options, option.WithTokenSource(jwtConfig.TokenSource(context.TODO()))) } client, err := storage.NewClient(context.TODO(), options...) if err != nil { - return nil, fmt.Errorf("Failed to connect to Google Cloud Storage: %w", err) + return nil, errors.RepositoryConfigurationError(fmt.Sprintf("Failed to connect to Google Cloud Storage: %v", err)) } return &GCSRepository{ @@ -65,15 +66,15 @@ func (s *GCSRepository) Get(path string) ([]byte, error) { reader, err := obj.NewReader(context.TODO()) if err != nil { if err == storage.ErrObjectNotExist { - return nil, &DoesNotExistError{msg: "Get: path does not exist: " + pathString} + return nil, errors.DoesNotExist(fmt.Sprintf("Get: path does not exist: %s", pathString)) } - return nil, fmt.Errorf("Failed to open %s: %s", pathString, err) + return nil, errors.ReadError(fmt.Sprintf("Failed to open %s: %s", pathString, err)) } // FIXME: unhandled error defer reader.Close() data, err := ioutil.ReadAll(reader) if err != nil { - return nil, fmt.Errorf("Failed to read %s: %s", pathString, err) + return nil, errors.ReadError(fmt.Sprintf("Failed to read %s: %s", pathString, err)) } return data, nil @@ -88,7 +89,7 @@ func (s *GCSRepository) Delete(path string) error { return obj.Delete(context.TODO()) }) if err != nil { - return fmt.Errorf("Failed to delete %s/%s: %w", s.RootURL(), path, err) + return 
errors.WriteError(fmt.Sprintf("Failed to delete %s/%s: %v", s.RootURL(), path, err)) } return nil } @@ -102,34 +103,34 @@ func (s *GCSRepository) Put(path string, data []byte) error { writer := obj.NewWriter(context.TODO()) _, err := writer.Write(data) if err != nil { - return fmt.Errorf("Failed to write %q: %w", pathString, err) + return errors.WriteError(fmt.Sprintf("Failed to write %q: %v", pathString, err)) } if err := writer.Close(); err != nil { if strings.Contains(err.Error(), "notFound") { if err := s.ensureBucketExists(); err != nil { - return fmt.Errorf("Error creating bucket: %w", err) + return err } writer := obj.NewWriter(context.TODO()) _, err := writer.Write(data) if err != nil { - return fmt.Errorf("Failed to write %q: %w", pathString, err) + return errors.WriteError(fmt.Sprintf("Failed to write %q: %v", pathString, err)) } if err := writer.Close(); err != nil { - return fmt.Errorf("Failed to write %q: %w", pathString, err) + return errors.WriteError(fmt.Sprintf("Failed to write %q: %v", pathString, err)) } return nil } - return fmt.Errorf("Failed to write %q: %w", pathString, err) + return errors.WriteError(fmt.Sprintf("Failed to write %q: %v", pathString, err)) } return nil } func (s *GCSRepository) PutPath(localPath string, repoPath string) error { files, err := getListOfFilesToPut(localPath, filepath.Join(s.root, repoPath)) - bucket := s.client.Bucket(s.bucketName) if err != nil { return err } + bucket := s.client.Bucket(s.bucketName) queue := concurrency.NewWorkerQueue(context.Background(), maxWorkers) for _, file := range files { // Variables used in closure @@ -153,10 +154,13 @@ func (s *GCSRepository) PutPath(localPath string, repoPath string) error { return nil }) if err != nil { - return err + return errors.WriteError(err.Error()) } } - return queue.Wait() + if err := queue.Wait(); err != nil { + return errors.WriteError(err.Error()) + } + return nil } func (s *GCSRepository) PutPathTar(localPath, tarPath, includePath string) error { @@ 
-164,7 +168,7 @@ func (s *GCSRepository) PutPathTar(localPath, tarPath, includePath string) error return fmt.Errorf("PutPathTar: tarPath must end with .tar.gz") } if err := s.ensureBucketExists(); err != nil { - return fmt.Errorf("Error creating bucket: %w", err) + return err } key := filepath.Join(s.root, tarPath) @@ -173,9 +177,12 @@ func (s *GCSRepository) PutPathTar(localPath, tarPath, includePath string) error writer := obj.NewWriter(context.TODO()) if err := putPathTar(localPath, writer, filepath.Base(tarPath), includePath); err != nil { - return err + return errors.WriteError(err.Error()) } - return writer.Close() + if err := writer.Close(); err != nil { + return errors.WriteError(err.Error()) + } + return nil } // List files in a path non-recursively @@ -200,7 +207,7 @@ func (s *GCSRepository) List(dir string) ([]string, error) { break } if err != nil { - return nil, fmt.Errorf("Failed to list %s/%s: %s", s.RootURL(), dir, err) + return nil, errors.ReadError(fmt.Sprintf("Failed to list %s/%s: %s", s.RootURL(), dir, err)) } p := attrs.Name if s.root != "" { @@ -230,7 +237,7 @@ func (s *GCSRepository) ListTarFile(tarPath string) ([]string, error) { return []string{}, err } if !exists { - return nil, &DoesNotExistError{msg: "Path does not exist: " + tmptarball} + return nil, errors.DoesNotExist("Path does not exist: " + tmptarball) } files, err := getListOfFilesInTar(tmptarball) @@ -302,35 +309,35 @@ func (s *GCSRepository) GetPath(repoDir string, localDir string) error { gcsPathString := fmt.Sprintf("gs://%s/%s", s.bucketName, obj.ObjectName()) reader, err := obj.NewReader(context.TODO()) if err != nil { - return fmt.Errorf("Failed to open %s: %w", gcsPathString, err) + return errors.ReadError(fmt.Sprintf("Failed to open %s: %v", gcsPathString, err)) } defer reader.Close() relPath, err := filepath.Rel(prefix, obj.ObjectName()) if err != nil { - return fmt.Errorf("Failed to determine directory of %s relative to %s: %w", obj.ObjectName(), repoDir, err) + return 
errors.ReadError(fmt.Sprintf("Failed to determine directory of %s relative to %s: %v", obj.ObjectName(), repoDir, err)) } localPath := filepath.Join(localDir, relPath) localDir := filepath.Dir(localPath) if err := os.MkdirAll(localDir, 0755); err != nil { - return fmt.Errorf("Failed to create directory %s: %w", localDir, err) + return errors.ReadError(fmt.Sprintf("Failed to create directory %s: %v", localDir, err)) } f, err := os.Create(localPath) if err != nil { - return fmt.Errorf("Failed to create file %s: %w", localPath, err) + return errors.ReadError(fmt.Sprintf("Failed to create file %s: %v", localPath, err)) } defer f.Close() console.Debug("Downloading %s to %s", gcsPathString, localPath) if _, err := io.Copy(f, reader); err != nil { - return fmt.Errorf("Failed to copy %s to %s: %w", gcsPathString, localPath, err) + return errors.ReadError(fmt.Sprintf("Failed to copy %s to %s: %v", gcsPathString, localPath, err)) } return nil }) if err != nil { - return fmt.Errorf("Failed to copy gs://%s/%s to %s: %w", s.bucketName, repoDir, localDir, err) + return fmt.Errorf("Failed to copy gs://%s/%s to %s: %v", s.bucketName, repoDir, localDir, err) } return nil } @@ -352,7 +359,7 @@ func (s *GCSRepository) GetPathTar(tarPath, localPath string) error { return err } if !exists { - return &DoesNotExistError{msg: "GetPathTar: does not exist: " + tmptarball} + return errors.DoesNotExist(fmt.Sprintf("Path does not exist: %s", tmptarball)) } return extractTar(tmptarball, localPath) } @@ -374,7 +381,7 @@ func (s *GCSRepository) GetPathItemTar(tarPath, itemPath, localPath string) erro return err } if !exists { - return &DoesNotExistError{msg: "GetPathTar: does not exist: " + tmptarball} + return errors.DoesNotExist("Path does not exist: " + tmptarball) } return extractTarItem(tmptarball, itemPath, localPath) } @@ -388,7 +395,7 @@ func (s *GCSRepository) bucketExists() (bool, error) { if err == storage.ErrBucketNotExist { return false, nil } - return false, fmt.Errorf("Failed to 
determine if bucket gs://%s exists: %w", s.bucketName, err) + return false, errors.RepositoryConfigurationError(fmt.Sprintf("Failed to determine if bucket gs://%s exists: %v", s.bucketName, err)) } func (s *GCSRepository) ensureBucketExists() error { @@ -409,7 +416,7 @@ func (s *GCSRepository) CreateBucket() error { } bucket := s.client.Bucket(s.bucketName) if err := bucket.Create(context.TODO(), projectID, nil); err != nil { - return fmt.Errorf("Failed to create bucket gs://%s: %w", s.bucketName, err) + return fmt.Errorf("Failed to create bucket gs://%s: %v", s.bucketName, err) } return nil } @@ -467,7 +474,7 @@ func discoverProjectID() (string, error) { if ee, ok := err.(*exec.ExitError); ok { stderr += "\n" + string(ee.Stderr) } - return "", fmt.Errorf("Failed to determine default GCP project (using gcloud config config-helper): %w\n%s", err, stderr) + return "", errors.RepositoryConfigurationError(fmt.Sprintf("Failed to determine default GCP project (using gcloud config config-helper): %v\n%s", err, stderr)) } return strings.TrimSpace(string(out)), nil } diff --git a/go/pkg/repository/gcs_test.go b/go/pkg/repository/gcs_test.go index fdafb485..6bbc7e5e 100644 --- a/go/pkg/repository/gcs_test.go +++ b/go/pkg/repository/gcs_test.go @@ -14,6 +14,7 @@ import ( "github.com/stretchr/testify/require" "google.golang.org/api/iterator" + "github.com/replicate/replicate/go/pkg/errors" "github.com/replicate/replicate/go/pkg/files" "github.com/replicate/replicate/go/pkg/hash" ) @@ -102,7 +103,7 @@ func TestGCSRepository(t *testing.T) { tmpDir, err := files.TempDir("test") require.NoError(t, err) err = repository.GetPathTar("does-not-exist.tar.gz", tmpDir) - require.IsType(t, &DoesNotExistError{}, err) + require.True(t, errors.IsDoesNotExist(err)) }) clearGCSBucket(t, bucket) diff --git a/go/pkg/repository/repository.go b/go/pkg/repository/repository.go index 8bb5ca62..e524a62b 100644 --- a/go/pkg/repository/repository.go +++ b/go/pkg/repository/repository.go @@ -13,6 +13,7 
@@ import ( "github.com/mholt/archiver/v3" gitignore "github.com/sabhiram/go-gitignore" + "github.com/replicate/replicate/go/pkg/errors" "github.com/replicate/replicate/go/pkg/files" ) @@ -115,13 +116,16 @@ func SplitURL(repositoryURL string) (scheme Scheme, bucket string, root string, return "", "", "", unknownRepositoryScheme(u.Scheme) } -func ForURL(repositoryURL string) (Repository, error) { +func ForURL(repositoryURL string, projectDir string) (Repository, error) { scheme, bucket, root, err := SplitURL(repositoryURL) if err != nil { return nil, err } switch scheme { case SchemeDisk: + if !filepath.IsAbs(root) { + root = path.Join(projectDir, root) + } return NewDiskRepository(root) case SchemeS3: return NewS3Repository(bucket, root) @@ -205,7 +209,7 @@ func putPathTar(localPath string, out io.Writer, tarFileName string, includePath z := archiver.NewTarGz() if err := z.Create(out); err != nil { - return err + return errors.WriteError(err.Error()) } defer z.Close() @@ -233,11 +237,14 @@ func putPathTar(localPath string, out io.Writer, tarFileName string, includePath }) fh.Close() if err != nil { - return err + return errors.WriteError(err.Error()) } } // Explicitly call Close() on success to capture error. 
- return z.Close() + if err := z.Close(); err != nil { + return errors.WriteError(err.Error()) + } + return nil } func extractTar(tarPath, localPath string) error { @@ -283,7 +290,7 @@ func extractTarItem(tarPath, itemPath, localPath string) error { } if !itemPathExists { - return &DoesNotExistError{msg: "Path does not exist inside the tarfile: " + itemPath} + return errors.DoesNotExist("Path does not exist inside the tarfile: " + itemPath) } tmpDir, err := files.TempDir("temp-extract-dir") diff --git a/go/pkg/repository/repository_test.go b/go/pkg/repository/repository_test.go index 121f0e77..19edb637 100644 --- a/go/pkg/repository/repository_test.go +++ b/go/pkg/repository/repository_test.go @@ -11,6 +11,7 @@ import ( "github.com/stretchr/testify/require" + "github.com/replicate/replicate/go/pkg/errors" "github.com/replicate/replicate/go/pkg/files" ) @@ -148,5 +149,5 @@ func TestExtractTarItem(t *testing.T) { // Extract a file that does not exist err = extractTarItem(path.Join(dir, "temp.tar.gz"), "does-not-exist.txt", tmpDir) - require.IsType(t, &DoesNotExistError{}, err) + require.True(t, errors.IsDoesNotExist(err)) } diff --git a/go/pkg/repository/s3.go b/go/pkg/repository/s3.go index 73bf62cf..1dcbb9c9 100644 --- a/go/pkg/repository/s3.go +++ b/go/pkg/repository/s3.go @@ -21,6 +21,7 @@ import ( "github.com/replicate/replicate/go/pkg/concurrency" "github.com/replicate/replicate/go/pkg/console" + "github.com/replicate/replicate/go/pkg/errors" "github.com/replicate/replicate/go/pkg/files" ) @@ -46,7 +47,7 @@ func NewS3Repository(bucket, root string) (*S3Repository, error) { CredentialsChainVerboseErrors: aws.Bool(true), }) if err != nil { - return nil, fmt.Errorf("Failed to connect to S3: %s", err) + return nil, errors.RepositoryConfigurationError(fmt.Sprintf("Failed to connect to S3: %s", err)) } s.svc = s3.New(s.sess) @@ -71,14 +72,14 @@ func (s *S3Repository) Get(path string) ([]byte, error) { if err != nil { if aerr, ok := err.(awserr.Error); ok { if 
aerr.Code() == s3.ErrCodeNoSuchKey { - return nil, &DoesNotExistError{msg: "Get: path does not exist: " + path} + return nil, errors.DoesNotExist(fmt.Sprintf("Get: path does not exist: %v", path)) } } - return nil, fmt.Errorf("Failed to read %s/%s: %s", s.RootURL(), path, err) + return nil, errors.ReadError(fmt.Sprintf("Failed to read %s/%s: %s", s.RootURL(), path, err)) } body, err := ioutil.ReadAll(obj.Body) if err != nil { - return nil, fmt.Errorf("Failed to read body from %s/%s: %s", s.RootURL(), path, err) + return nil, errors.ReadError(fmt.Sprintf("Failed to read body from %s/%s: %s", s.RootURL(), path, err)) } return body, nil } @@ -91,7 +92,7 @@ func (s *S3Repository) Delete(path string) error { Prefix: &key, }) if err := s3manager.NewBatchDeleteWithClient(s.svc).Delete(aws.BackgroundContext(), iter); err != nil { - return fmt.Errorf("Failed to delete %s/%s: %w", s.RootURL(), path, err) + return errors.WriteError(fmt.Sprintf("Failed to delete %s/%s: %v", s.RootURL(), path, err)) } return nil } @@ -106,7 +107,7 @@ func (s *S3Repository) Put(path string, data []byte) error { Body: bytes.NewReader(data), }) if err != nil { - return fmt.Errorf("Unable to upload to %s/%s: %w", s.RootURL(), path, err) + return errors.WriteError(fmt.Sprintf("Unable to upload to %s/%s: %v", s.RootURL(), path, err)) } return nil } @@ -114,7 +115,7 @@ func (s *S3Repository) Put(path string, data []byte) error { func (s *S3Repository) PutPath(localPath string, destPath string) error { files, err := getListOfFilesToPut(localPath, filepath.Join(s.root, destPath)) if err != nil { - return err + return errors.WriteError(err.Error()) } queue := concurrency.NewWorkerQueue(context.Background(), maxWorkers) @@ -136,11 +137,14 @@ func (s *S3Repository) PutPath(localPath string, destPath string) error { return err }) if err != nil { - return err + return errors.WriteError(err.Error()) } } - return queue.Wait() + if err := queue.Wait(); err != nil { + return errors.WriteError(err.Error()) + } + 
return nil } func (s *S3Repository) PutPathTar(localPath, tarPath, includePath string) error { @@ -169,7 +173,10 @@ func (s *S3Repository) PutPathTar(localPath, tarPath, includePath string) error }) return err }) - return errs.Wait() + if err := errs.Wait(); err != nil { + return errors.WriteError(err.Error()) + } + return nil } // GetPath recursively copies repoDir to localDir @@ -196,23 +203,23 @@ func (s *S3Repository) GetPath(remoteDir string, localDir string) error { return true }) if err != nil { - return fmt.Errorf("Failed to list objects in s3://%s/%s: %w", s.bucketName, prefix, err) + return errors.ReadError(fmt.Sprintf("Failed to list objects in s3://%s/%s: %v", s.bucketName, prefix, err)) } for _, key := range keys { relPath, err := filepath.Rel(prefix, *key) if err != nil { - return fmt.Errorf("Failed to determine directory of %s relative to %s: %w", *key, prefix, err) + return fmt.Errorf("Failed to determine directory of %s relative to %s: %v", *key, prefix, err) } localPath := filepath.Join(localDir, relPath) localDir := filepath.Dir(localPath) if err := os.MkdirAll(localDir, 0755); err != nil { - return fmt.Errorf("Failed to create directory %s: %w", localDir, err) + return fmt.Errorf("Failed to create directory %s: %v", localDir, err) } f, err := os.Create(localPath) if err != nil { - return fmt.Errorf("Failed to create file %s: %w", localPath, err) + return fmt.Errorf("Failed to create file %s: %v", localPath, err) } console.Debug("Downloading %s to %s", *key, localPath) @@ -229,7 +236,7 @@ func (s *S3Repository) GetPath(remoteDir string, localDir string) error { downloader := s3manager.NewDownloader(s.sess) if err := downloader.DownloadWithIterator(aws.BackgroundContext(), iter); err != nil { - return fmt.Errorf("Failed to download s3://%s/%s to %s", s.bucketName, prefix, localDir) + return errors.ReadError(fmt.Sprintf("Failed to download s3://%s/%s to %s", s.bucketName, prefix, localDir)) } return nil } @@ -251,7 +258,7 @@ func (s *S3Repository) 
GetPathTar(tarPath, localPath string) error { return err } if !exists { - return &DoesNotExistError{msg: "GetPathTar: does not exist: " + tmptarball} + return errors.DoesNotExist(fmt.Sprintf("GetPathTar: does not exist: %v", tmptarball)) } return extractTar(tmptarball, localPath) } @@ -273,7 +280,7 @@ func (s *S3Repository) GetPathItemTar(tarPath, itemPath, localPath string) error return err } if !exists { - return &DoesNotExistError{msg: "Path does not exist: " + tmptarball} + return errors.DoesNotExist("Path does not exist: " + tmptarball) } return extractTarItem(tmptarball, itemPath, localPath) } @@ -314,7 +321,10 @@ func (s *S3Repository) List(dir string) ([]string, error) { } return true }) - return results, err + if err != nil { + return nil, errors.ReadError(err.Error()) + } + return results, nil } func (s *S3Repository) ListTarFile(tarPath string) ([]string, error) { @@ -334,7 +344,7 @@ func (s *S3Repository) ListTarFile(tarPath string) ([]string, error) { return nil, err } if !exists { - return nil, &DoesNotExistError{msg: "Path does not exist: " + tmptarball} + return nil, errors.DoesNotExist("Path does not exist: " + tmptarball) } files, err := getListOfFilesInTar(tmptarball) @@ -364,13 +374,17 @@ func CreateS3Bucket(region, bucket string) (err error) { Bucket: aws.String(bucket), }) if err != nil { - return fmt.Errorf("Unable to create bucket %q, %w", bucket, err) + return errors.WriteError(fmt.Sprintf("Unable to create bucket %q, %v", bucket, err)) } // Default max attempts is 20, but we hit this sometimes - return svc.WaitUntilBucketExistsWithContext(aws.BackgroundContext(), &s3.HeadBucketInput{ + err = svc.WaitUntilBucketExistsWithContext(aws.BackgroundContext(), &s3.HeadBucketInput{ Bucket: aws.String(bucket), }, request.WithWaiterMaxAttempts(50)) + if err != nil { + return errors.WriteError(err.Error()) + } + return nil } func DeleteS3Bucket(region, bucket string) (err error) { @@ -379,7 +393,7 @@ func DeleteS3Bucket(region, bucket string) (err 
error) { CredentialsChainVerboseErrors: aws.Bool(true), }) if err != nil { - return fmt.Errorf("Failed to connect to S3: %w", err) + return fmt.Errorf("Failed to connect to S3: %v", err) } svc := s3.New(sess) @@ -388,13 +402,13 @@ func DeleteS3Bucket(region, bucket string) (err error) { }) if err := s3manager.NewBatchDeleteWithClient(svc).Delete(aws.BackgroundContext(), iter); err != nil { - return fmt.Errorf("Unable to delete objects from bucket %q, %w", bucket, err) + return errors.WriteError(fmt.Sprintf("Unable to delete objects from bucket %q, %v", bucket, err)) } _, err = svc.DeleteBucket(&s3.DeleteBucketInput{ Bucket: aws.String(bucket), }) if err != nil { - return fmt.Errorf("Unable to delete bucket %q, %w", bucket, err) + return errors.WriteError(fmt.Sprintf("Unable to delete bucket %q, %v", bucket, err)) } return nil } @@ -435,7 +449,11 @@ func (s *S3Repository) listRecursive(results chan<- ListResult, dir string, filt func discoverBucketRegion(bucket string) (string, error) { sess := session.Must(session.NewSession(&aws.Config{})) ctx := context.Background() - return s3manager.GetBucketRegion(ctx, sess, bucket, "us-east-1") + region, err := s3manager.GetBucketRegion(ctx, sess, bucket, "us-east-1") + if err != nil { + return "", err + } + return region, nil } func getBucketRegionOrCreateBucket(bucket string) (string, error) { @@ -448,7 +466,7 @@ func getBucketRegionOrCreateBucket(bucket string) (string, error) { // TODO (bfirsh): report to use that this is being created, in a way that is compatible with shared library region = "us-east-1" if err := CreateS3Bucket(region, bucket); err != nil { - return "", fmt.Errorf("Error creating bucket: %w", err) + return "", fmt.Errorf("Error creating bucket: %v", err) } return region, nil } diff --git a/go/pkg/repository/s3_test.go b/go/pkg/repository/s3_test.go index 59145fe2..1d9b1a87 100644 --- a/go/pkg/repository/s3_test.go +++ b/go/pkg/repository/s3_test.go @@ -14,6 +14,7 @@ import ( 
"github.com/aws/aws-sdk-go/service/s3" "github.com/stretchr/testify/require" + "github.com/replicate/replicate/go/pkg/errors" "github.com/replicate/replicate/go/pkg/files" "github.com/replicate/replicate/go/pkg/hash" ) @@ -38,7 +39,7 @@ func TestS3RepositoryGet(t *testing.T) { _, err = repository.Get("does-not-exist") fmt.Println(err) - require.IsType(t, &DoesNotExistError{}, err) + require.True(t, errors.IsDoesNotExist(err)) } func TestS3GetPathTar(t *testing.T) { @@ -51,7 +52,7 @@ func TestS3GetPathTar(t *testing.T) { tmpDir, err := files.TempDir("test") require.NoError(t, err) err = repository.GetPathTar("does-not-exist.tar.gz", tmpDir) - require.IsType(t, &DoesNotExistError{}, err) + require.True(t, errors.IsDoesNotExist(err)) } func TestS3RepositoryPutPath(t *testing.T) { diff --git a/go/pkg/repository/spec.go b/go/pkg/repository/spec.go new file mode 100644 index 00000000..42733fde --- /dev/null +++ b/go/pkg/repository/spec.go @@ -0,0 +1,42 @@ +package repository + +import ( + "encoding/json" + "fmt" + + "github.com/replicate/replicate/go/pkg/errors" +) + +const Version = 1 +const SpecPath = "repository.json" + +type Spec struct { + Version int `json:"version"` +} + +// LoadSpec returns the repository spec, or nil if the repository doesn't have a spec file +func LoadSpec(r Repository) (*Spec, error) { + raw, err := r.Get(SpecPath) + if err != nil { + if errors.IsDoesNotExist(err) { + return nil, nil + } + return nil, fmt.Errorf("Failed to read %s/%s: %v", r.RootURL(), SpecPath, err) + } + + spec := &Spec{} + if err := json.Unmarshal(raw, spec); err != nil { + return nil, errors.CorruptedRepositorySpec(r.RootURL(), SpecPath, err) + } + + return spec, nil +} + +func WriteSpec(r Repository) error { + spec := Spec{Version: Version} + raw, err := json.Marshal(&spec) + if err != nil { + panic(err) // should never happen + } + return r.Put(SpecPath, raw) +} diff --git a/go/pkg/repository/sync_test.go b/go/pkg/repository/sync_test.go index da5b3043..af271874 100644 
--- a/go/pkg/repository/sync_test.go +++ b/go/pkg/repository/sync_test.go @@ -7,6 +7,8 @@ import ( "testing" "github.com/stretchr/testify/require" + + "github.com/replicate/replicate/go/pkg/errors" ) func TestSync(t *testing.T) { @@ -51,7 +53,7 @@ func TestSync(t *testing.T) { require.Equal(t, []byte("hello"), data) _, err = destRepository.Get("dest-path/in-dest-but-not-in-source") - require.IsType(t, &DoesNotExistError{}, err) + require.True(t, errors.IsDoesNotExist(err)) data, err = destRepository.Get("dest-path/different-content") require.NoError(t, err) diff --git a/go/pkg/servicepb/replicate.pb.go b/go/pkg/servicepb/replicate.pb.go new file mode 100644 index 00000000..d68b7429 --- /dev/null +++ b/go/pkg/servicepb/replicate.pb.go @@ -0,0 +1,2067 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.25.0 +// protoc v3.14.0 +// source: replicate.proto + +package servicepb + +import ( + reflect "reflect" + sync "sync" + + proto "github.com/golang/protobuf/proto" + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + timestamppb "google.golang.org/protobuf/types/known/timestamppb" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +// This is a compile-time assertion that a sufficiently up-to-date version +// of the legacy proto package is being used. +const _ = proto.ProtoPackageIsVersion4 + +type GetExperimentStatusReply_Status int32 + +const ( + GetExperimentStatusReply_RUNNING GetExperimentStatusReply_Status = 0 + GetExperimentStatusReply_STOPPED GetExperimentStatusReply_Status = 1 +) + +// Enum value maps for GetExperimentStatusReply_Status. 
+var ( + GetExperimentStatusReply_Status_name = map[int32]string{ + 0: "RUNNING", + 1: "STOPPED", + } + GetExperimentStatusReply_Status_value = map[string]int32{ + "RUNNING": 0, + "STOPPED": 1, + } +) + +func (x GetExperimentStatusReply_Status) Enum() *GetExperimentStatusReply_Status { + p := new(GetExperimentStatusReply_Status) + *p = x + return p +} + +func (x GetExperimentStatusReply_Status) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (GetExperimentStatusReply_Status) Descriptor() protoreflect.EnumDescriptor { + return file_replicate_proto_enumTypes[0].Descriptor() +} + +func (GetExperimentStatusReply_Status) Type() protoreflect.EnumType { + return &file_replicate_proto_enumTypes[0] +} + +func (x GetExperimentStatusReply_Status) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use GetExperimentStatusReply_Status.Descriptor instead. +func (GetExperimentStatusReply_Status) EnumDescriptor() ([]byte, []int) { + return file_replicate_proto_rawDescGZIP(), []int{17, 0} +} + +type PrimaryMetric_Goal int32 + +const ( + PrimaryMetric_MAXIMIZE PrimaryMetric_Goal = 0 + PrimaryMetric_MINIMIZE PrimaryMetric_Goal = 1 +) + +// Enum value maps for PrimaryMetric_Goal. 
+var ( + PrimaryMetric_Goal_name = map[int32]string{ + 0: "MAXIMIZE", + 1: "MINIMIZE", + } + PrimaryMetric_Goal_value = map[string]int32{ + "MAXIMIZE": 0, + "MINIMIZE": 1, + } +) + +func (x PrimaryMetric_Goal) Enum() *PrimaryMetric_Goal { + p := new(PrimaryMetric_Goal) + *p = x + return p +} + +func (x PrimaryMetric_Goal) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (PrimaryMetric_Goal) Descriptor() protoreflect.EnumDescriptor { + return file_replicate_proto_enumTypes[1].Descriptor() +} + +func (PrimaryMetric_Goal) Type() protoreflect.EnumType { + return &file_replicate_proto_enumTypes[1] +} + +func (x PrimaryMetric_Goal) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use PrimaryMetric_Goal.Descriptor instead. +func (PrimaryMetric_Goal) EnumDescriptor() ([]byte, []int) { + return file_replicate_proto_rawDescGZIP(), []int{21, 0} +} + +type CreateExperimentRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Experiment *Experiment `protobuf:"bytes,1,opt,name=experiment,proto3" json:"experiment,omitempty"` + DisableHeartbeat bool `protobuf:"varint,2,opt,name=disableHeartbeat,proto3" json:"disableHeartbeat,omitempty"` + Quiet bool `protobuf:"varint,3,opt,name=quiet,proto3" json:"quiet,omitempty"` +} + +func (x *CreateExperimentRequest) Reset() { + *x = CreateExperimentRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_replicate_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CreateExperimentRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CreateExperimentRequest) ProtoMessage() {} + +func (x *CreateExperimentRequest) ProtoReflect() protoreflect.Message { + mi := &file_replicate_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := 
protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CreateExperimentRequest.ProtoReflect.Descriptor instead. +func (*CreateExperimentRequest) Descriptor() ([]byte, []int) { + return file_replicate_proto_rawDescGZIP(), []int{0} +} + +func (x *CreateExperimentRequest) GetExperiment() *Experiment { + if x != nil { + return x.Experiment + } + return nil +} + +func (x *CreateExperimentRequest) GetDisableHeartbeat() bool { + if x != nil { + return x.DisableHeartbeat + } + return false +} + +func (x *CreateExperimentRequest) GetQuiet() bool { + if x != nil { + return x.Quiet + } + return false +} + +type CreateExperimentReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Experiment *Experiment `protobuf:"bytes,1,opt,name=experiment,proto3" json:"experiment,omitempty"` +} + +func (x *CreateExperimentReply) Reset() { + *x = CreateExperimentReply{} + if protoimpl.UnsafeEnabled { + mi := &file_replicate_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CreateExperimentReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CreateExperimentReply) ProtoMessage() {} + +func (x *CreateExperimentReply) ProtoReflect() protoreflect.Message { + mi := &file_replicate_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CreateExperimentReply.ProtoReflect.Descriptor instead. 
+func (*CreateExperimentReply) Descriptor() ([]byte, []int) { + return file_replicate_proto_rawDescGZIP(), []int{1} +} + +func (x *CreateExperimentReply) GetExperiment() *Experiment { + if x != nil { + return x.Experiment + } + return nil +} + +type CreateCheckpointRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Checkpoint *Checkpoint `protobuf:"bytes,1,opt,name=checkpoint,proto3" json:"checkpoint,omitempty"` + Quiet bool `protobuf:"varint,2,opt,name=quiet,proto3" json:"quiet,omitempty"` +} + +func (x *CreateCheckpointRequest) Reset() { + *x = CreateCheckpointRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_replicate_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CreateCheckpointRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CreateCheckpointRequest) ProtoMessage() {} + +func (x *CreateCheckpointRequest) ProtoReflect() protoreflect.Message { + mi := &file_replicate_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CreateCheckpointRequest.ProtoReflect.Descriptor instead. 
+func (*CreateCheckpointRequest) Descriptor() ([]byte, []int) { + return file_replicate_proto_rawDescGZIP(), []int{2} +} + +func (x *CreateCheckpointRequest) GetCheckpoint() *Checkpoint { + if x != nil { + return x.Checkpoint + } + return nil +} + +func (x *CreateCheckpointRequest) GetQuiet() bool { + if x != nil { + return x.Quiet + } + return false +} + +type CreateCheckpointReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Checkpoint *Checkpoint `protobuf:"bytes,1,opt,name=checkpoint,proto3" json:"checkpoint,omitempty"` +} + +func (x *CreateCheckpointReply) Reset() { + *x = CreateCheckpointReply{} + if protoimpl.UnsafeEnabled { + mi := &file_replicate_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CreateCheckpointReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CreateCheckpointReply) ProtoMessage() {} + +func (x *CreateCheckpointReply) ProtoReflect() protoreflect.Message { + mi := &file_replicate_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CreateCheckpointReply.ProtoReflect.Descriptor instead. 
+func (*CreateCheckpointReply) Descriptor() ([]byte, []int) { + return file_replicate_proto_rawDescGZIP(), []int{3} +} + +func (x *CreateCheckpointReply) GetCheckpoint() *Checkpoint { + if x != nil { + return x.Checkpoint + } + return nil +} + +type SaveExperimentRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Experiment *Experiment `protobuf:"bytes,1,opt,name=experiment,proto3" json:"experiment,omitempty"` + Quiet bool `protobuf:"varint,2,opt,name=quiet,proto3" json:"quiet,omitempty"` +} + +func (x *SaveExperimentRequest) Reset() { + *x = SaveExperimentRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_replicate_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SaveExperimentRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SaveExperimentRequest) ProtoMessage() {} + +func (x *SaveExperimentRequest) ProtoReflect() protoreflect.Message { + mi := &file_replicate_proto_msgTypes[4] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SaveExperimentRequest.ProtoReflect.Descriptor instead. 
+func (*SaveExperimentRequest) Descriptor() ([]byte, []int) { + return file_replicate_proto_rawDescGZIP(), []int{4} +} + +func (x *SaveExperimentRequest) GetExperiment() *Experiment { + if x != nil { + return x.Experiment + } + return nil +} + +func (x *SaveExperimentRequest) GetQuiet() bool { + if x != nil { + return x.Quiet + } + return false +} + +type SaveExperimentReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Experiment *Experiment `protobuf:"bytes,1,opt,name=experiment,proto3" json:"experiment,omitempty"` +} + +func (x *SaveExperimentReply) Reset() { + *x = SaveExperimentReply{} + if protoimpl.UnsafeEnabled { + mi := &file_replicate_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SaveExperimentReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SaveExperimentReply) ProtoMessage() {} + +func (x *SaveExperimentReply) ProtoReflect() protoreflect.Message { + mi := &file_replicate_proto_msgTypes[5] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SaveExperimentReply.ProtoReflect.Descriptor instead. 
+func (*SaveExperimentReply) Descriptor() ([]byte, []int) { + return file_replicate_proto_rawDescGZIP(), []int{5} +} + +func (x *SaveExperimentReply) GetExperiment() *Experiment { + if x != nil { + return x.Experiment + } + return nil +} + +type StopExperimentRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + ExperimentID string `protobuf:"bytes,1,opt,name=experimentID,proto3" json:"experimentID,omitempty"` +} + +func (x *StopExperimentRequest) Reset() { + *x = StopExperimentRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_replicate_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *StopExperimentRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*StopExperimentRequest) ProtoMessage() {} + +func (x *StopExperimentRequest) ProtoReflect() protoreflect.Message { + mi := &file_replicate_proto_msgTypes[6] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use StopExperimentRequest.ProtoReflect.Descriptor instead. 
+func (*StopExperimentRequest) Descriptor() ([]byte, []int) { + return file_replicate_proto_rawDescGZIP(), []int{6} +} + +func (x *StopExperimentRequest) GetExperimentID() string { + if x != nil { + return x.ExperimentID + } + return "" +} + +type StopExperimentReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *StopExperimentReply) Reset() { + *x = StopExperimentReply{} + if protoimpl.UnsafeEnabled { + mi := &file_replicate_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *StopExperimentReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*StopExperimentReply) ProtoMessage() {} + +func (x *StopExperimentReply) ProtoReflect() protoreflect.Message { + mi := &file_replicate_proto_msgTypes[7] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use StopExperimentReply.ProtoReflect.Descriptor instead. 
+func (*StopExperimentReply) Descriptor() ([]byte, []int) { + return file_replicate_proto_rawDescGZIP(), []int{7} +} + +type GetExperimentRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + ExperimentIDPrefix string `protobuf:"bytes,1,opt,name=experimentIDPrefix,proto3" json:"experimentIDPrefix,omitempty"` +} + +func (x *GetExperimentRequest) Reset() { + *x = GetExperimentRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_replicate_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetExperimentRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetExperimentRequest) ProtoMessage() {} + +func (x *GetExperimentRequest) ProtoReflect() protoreflect.Message { + mi := &file_replicate_proto_msgTypes[8] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetExperimentRequest.ProtoReflect.Descriptor instead. 
+func (*GetExperimentRequest) Descriptor() ([]byte, []int) { + return file_replicate_proto_rawDescGZIP(), []int{8} +} + +func (x *GetExperimentRequest) GetExperimentIDPrefix() string { + if x != nil { + return x.ExperimentIDPrefix + } + return "" +} + +type GetExperimentReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Experiment *Experiment `protobuf:"bytes,1,opt,name=experiment,proto3" json:"experiment,omitempty"` +} + +func (x *GetExperimentReply) Reset() { + *x = GetExperimentReply{} + if protoimpl.UnsafeEnabled { + mi := &file_replicate_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetExperimentReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetExperimentReply) ProtoMessage() {} + +func (x *GetExperimentReply) ProtoReflect() protoreflect.Message { + mi := &file_replicate_proto_msgTypes[9] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetExperimentReply.ProtoReflect.Descriptor instead. 
+func (*GetExperimentReply) Descriptor() ([]byte, []int) { + return file_replicate_proto_rawDescGZIP(), []int{9} +} + +func (x *GetExperimentReply) GetExperiment() *Experiment { + if x != nil { + return x.Experiment + } + return nil +} + +type ListExperimentsRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *ListExperimentsRequest) Reset() { + *x = ListExperimentsRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_replicate_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ListExperimentsRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListExperimentsRequest) ProtoMessage() {} + +func (x *ListExperimentsRequest) ProtoReflect() protoreflect.Message { + mi := &file_replicate_proto_msgTypes[10] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListExperimentsRequest.ProtoReflect.Descriptor instead. 
+func (*ListExperimentsRequest) Descriptor() ([]byte, []int) { + return file_replicate_proto_rawDescGZIP(), []int{10} +} + +type ListExperimentsReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Experiments []*Experiment `protobuf:"bytes,1,rep,name=experiments,proto3" json:"experiments,omitempty"` +} + +func (x *ListExperimentsReply) Reset() { + *x = ListExperimentsReply{} + if protoimpl.UnsafeEnabled { + mi := &file_replicate_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ListExperimentsReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListExperimentsReply) ProtoMessage() {} + +func (x *ListExperimentsReply) ProtoReflect() protoreflect.Message { + mi := &file_replicate_proto_msgTypes[11] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListExperimentsReply.ProtoReflect.Descriptor instead. 
+func (*ListExperimentsReply) Descriptor() ([]byte, []int) { + return file_replicate_proto_rawDescGZIP(), []int{11} +} + +func (x *ListExperimentsReply) GetExperiments() []*Experiment { + if x != nil { + return x.Experiments + } + return nil +} + +type DeleteExperimentRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + ExperimentID string `protobuf:"bytes,1,opt,name=experimentID,proto3" json:"experimentID,omitempty"` +} + +func (x *DeleteExperimentRequest) Reset() { + *x = DeleteExperimentRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_replicate_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DeleteExperimentRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DeleteExperimentRequest) ProtoMessage() {} + +func (x *DeleteExperimentRequest) ProtoReflect() protoreflect.Message { + mi := &file_replicate_proto_msgTypes[12] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DeleteExperimentRequest.ProtoReflect.Descriptor instead. 
+func (*DeleteExperimentRequest) Descriptor() ([]byte, []int) { + return file_replicate_proto_rawDescGZIP(), []int{12} +} + +func (x *DeleteExperimentRequest) GetExperimentID() string { + if x != nil { + return x.ExperimentID + } + return "" +} + +type DeleteExperimentReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *DeleteExperimentReply) Reset() { + *x = DeleteExperimentReply{} + if protoimpl.UnsafeEnabled { + mi := &file_replicate_proto_msgTypes[13] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DeleteExperimentReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DeleteExperimentReply) ProtoMessage() {} + +func (x *DeleteExperimentReply) ProtoReflect() protoreflect.Message { + mi := &file_replicate_proto_msgTypes[13] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DeleteExperimentReply.ProtoReflect.Descriptor instead. 
+func (*DeleteExperimentReply) Descriptor() ([]byte, []int) { + return file_replicate_proto_rawDescGZIP(), []int{13} +} + +type CheckoutCheckpointRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + CheckpointIDPrefix string `protobuf:"bytes,1,opt,name=checkpointIDPrefix,proto3" json:"checkpointIDPrefix,omitempty"` + OutputDirectory string `protobuf:"bytes,2,opt,name=outputDirectory,proto3" json:"outputDirectory,omitempty"` + Quiet bool `protobuf:"varint,3,opt,name=quiet,proto3" json:"quiet,omitempty"` +} + +func (x *CheckoutCheckpointRequest) Reset() { + *x = CheckoutCheckpointRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_replicate_proto_msgTypes[14] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CheckoutCheckpointRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CheckoutCheckpointRequest) ProtoMessage() {} + +func (x *CheckoutCheckpointRequest) ProtoReflect() protoreflect.Message { + mi := &file_replicate_proto_msgTypes[14] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CheckoutCheckpointRequest.ProtoReflect.Descriptor instead. 
+func (*CheckoutCheckpointRequest) Descriptor() ([]byte, []int) { + return file_replicate_proto_rawDescGZIP(), []int{14} +} + +func (x *CheckoutCheckpointRequest) GetCheckpointIDPrefix() string { + if x != nil { + return x.CheckpointIDPrefix + } + return "" +} + +func (x *CheckoutCheckpointRequest) GetOutputDirectory() string { + if x != nil { + return x.OutputDirectory + } + return "" +} + +func (x *CheckoutCheckpointRequest) GetQuiet() bool { + if x != nil { + return x.Quiet + } + return false +} + +type CheckoutCheckpointReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *CheckoutCheckpointReply) Reset() { + *x = CheckoutCheckpointReply{} + if protoimpl.UnsafeEnabled { + mi := &file_replicate_proto_msgTypes[15] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CheckoutCheckpointReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CheckoutCheckpointReply) ProtoMessage() {} + +func (x *CheckoutCheckpointReply) ProtoReflect() protoreflect.Message { + mi := &file_replicate_proto_msgTypes[15] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CheckoutCheckpointReply.ProtoReflect.Descriptor instead. 
+func (*CheckoutCheckpointReply) Descriptor() ([]byte, []int) { + return file_replicate_proto_rawDescGZIP(), []int{15} +} + +type GetExperimentStatusRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + ExperimentID string `protobuf:"bytes,1,opt,name=experimentID,proto3" json:"experimentID,omitempty"` +} + +func (x *GetExperimentStatusRequest) Reset() { + *x = GetExperimentStatusRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_replicate_proto_msgTypes[16] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetExperimentStatusRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetExperimentStatusRequest) ProtoMessage() {} + +func (x *GetExperimentStatusRequest) ProtoReflect() protoreflect.Message { + mi := &file_replicate_proto_msgTypes[16] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetExperimentStatusRequest.ProtoReflect.Descriptor instead. 
+func (*GetExperimentStatusRequest) Descriptor() ([]byte, []int) { + return file_replicate_proto_rawDescGZIP(), []int{16} +} + +func (x *GetExperimentStatusRequest) GetExperimentID() string { + if x != nil { + return x.ExperimentID + } + return "" +} + +type GetExperimentStatusReply struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Status GetExperimentStatusReply_Status `protobuf:"varint,1,opt,name=status,proto3,enum=service.GetExperimentStatusReply_Status" json:"status,omitempty"` +} + +func (x *GetExperimentStatusReply) Reset() { + *x = GetExperimentStatusReply{} + if protoimpl.UnsafeEnabled { + mi := &file_replicate_proto_msgTypes[17] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetExperimentStatusReply) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetExperimentStatusReply) ProtoMessage() {} + +func (x *GetExperimentStatusReply) ProtoReflect() protoreflect.Message { + mi := &file_replicate_proto_msgTypes[17] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetExperimentStatusReply.ProtoReflect.Descriptor instead. 
+func (*GetExperimentStatusReply) Descriptor() ([]byte, []int) { + return file_replicate_proto_rawDescGZIP(), []int{17} +} + +func (x *GetExperimentStatusReply) GetStatus() GetExperimentStatusReply_Status { + if x != nil { + return x.Status + } + return GetExperimentStatusReply_RUNNING +} + +type Experiment struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + Created *timestamppb.Timestamp `protobuf:"bytes,2,opt,name=created,proto3" json:"created,omitempty"` + Params map[string]*ParamType `protobuf:"bytes,3,rep,name=params,proto3" json:"params,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Host string `protobuf:"bytes,4,opt,name=host,proto3" json:"host,omitempty"` + User string `protobuf:"bytes,5,opt,name=user,proto3" json:"user,omitempty"` + Config *Config `protobuf:"bytes,6,opt,name=config,proto3" json:"config,omitempty"` + Command string `protobuf:"bytes,7,opt,name=command,proto3" json:"command,omitempty"` + Path string `protobuf:"bytes,8,opt,name=path,proto3" json:"path,omitempty"` + PythonPackages map[string]string `protobuf:"bytes,9,rep,name=pythonPackages,proto3" json:"pythonPackages,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + PythonVersion string `protobuf:"bytes,10,opt,name=pythonVersion,proto3" json:"pythonVersion,omitempty"` + Checkpoints []*Checkpoint `protobuf:"bytes,11,rep,name=checkpoints,proto3" json:"checkpoints,omitempty"` + ReplicateVersion string `protobuf:"bytes,12,opt,name=replicateVersion,proto3" json:"replicateVersion,omitempty"` +} + +func (x *Experiment) Reset() { + *x = Experiment{} + if protoimpl.UnsafeEnabled { + mi := &file_replicate_proto_msgTypes[18] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Experiment) String() string { + return 
protoimpl.X.MessageStringOf(x) +} + +func (*Experiment) ProtoMessage() {} + +func (x *Experiment) ProtoReflect() protoreflect.Message { + mi := &file_replicate_proto_msgTypes[18] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Experiment.ProtoReflect.Descriptor instead. +func (*Experiment) Descriptor() ([]byte, []int) { + return file_replicate_proto_rawDescGZIP(), []int{18} +} + +func (x *Experiment) GetId() string { + if x != nil { + return x.Id + } + return "" +} + +func (x *Experiment) GetCreated() *timestamppb.Timestamp { + if x != nil { + return x.Created + } + return nil +} + +func (x *Experiment) GetParams() map[string]*ParamType { + if x != nil { + return x.Params + } + return nil +} + +func (x *Experiment) GetHost() string { + if x != nil { + return x.Host + } + return "" +} + +func (x *Experiment) GetUser() string { + if x != nil { + return x.User + } + return "" +} + +func (x *Experiment) GetConfig() *Config { + if x != nil { + return x.Config + } + return nil +} + +func (x *Experiment) GetCommand() string { + if x != nil { + return x.Command + } + return "" +} + +func (x *Experiment) GetPath() string { + if x != nil { + return x.Path + } + return "" +} + +func (x *Experiment) GetPythonPackages() map[string]string { + if x != nil { + return x.PythonPackages + } + return nil +} + +func (x *Experiment) GetPythonVersion() string { + if x != nil { + return x.PythonVersion + } + return "" +} + +func (x *Experiment) GetCheckpoints() []*Checkpoint { + if x != nil { + return x.Checkpoints + } + return nil +} + +func (x *Experiment) GetReplicateVersion() string { + if x != nil { + return x.ReplicateVersion + } + return "" +} + +type Config struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Repository string 
`protobuf:"bytes,1,opt,name=repository,proto3" json:"repository,omitempty"` + // for backwards compatibility + Storage string `protobuf:"bytes,2,opt,name=storage,proto3" json:"storage,omitempty"` +} + +func (x *Config) Reset() { + *x = Config{} + if protoimpl.UnsafeEnabled { + mi := &file_replicate_proto_msgTypes[19] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Config) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Config) ProtoMessage() {} + +func (x *Config) ProtoReflect() protoreflect.Message { + mi := &file_replicate_proto_msgTypes[19] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Config.ProtoReflect.Descriptor instead. +func (*Config) Descriptor() ([]byte, []int) { + return file_replicate_proto_rawDescGZIP(), []int{19} +} + +func (x *Config) GetRepository() string { + if x != nil { + return x.Repository + } + return "" +} + +func (x *Config) GetStorage() string { + if x != nil { + return x.Storage + } + return "" +} + +type Checkpoint struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + Created *timestamppb.Timestamp `protobuf:"bytes,2,opt,name=created,proto3" json:"created,omitempty"` + Metrics map[string]*ParamType `protobuf:"bytes,3,rep,name=metrics,proto3" json:"metrics,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Step int64 `protobuf:"varint,4,opt,name=step,proto3" json:"step,omitempty"` + Path string `protobuf:"bytes,5,opt,name=path,proto3" json:"path,omitempty"` + PrimaryMetric *PrimaryMetric `protobuf:"bytes,6,opt,name=primaryMetric,proto3" json:"primaryMetric,omitempty"` +} + +func (x *Checkpoint) 
Reset() { + *x = Checkpoint{} + if protoimpl.UnsafeEnabled { + mi := &file_replicate_proto_msgTypes[20] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Checkpoint) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Checkpoint) ProtoMessage() {} + +func (x *Checkpoint) ProtoReflect() protoreflect.Message { + mi := &file_replicate_proto_msgTypes[20] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Checkpoint.ProtoReflect.Descriptor instead. +func (*Checkpoint) Descriptor() ([]byte, []int) { + return file_replicate_proto_rawDescGZIP(), []int{20} +} + +func (x *Checkpoint) GetId() string { + if x != nil { + return x.Id + } + return "" +} + +func (x *Checkpoint) GetCreated() *timestamppb.Timestamp { + if x != nil { + return x.Created + } + return nil +} + +func (x *Checkpoint) GetMetrics() map[string]*ParamType { + if x != nil { + return x.Metrics + } + return nil +} + +func (x *Checkpoint) GetStep() int64 { + if x != nil { + return x.Step + } + return 0 +} + +func (x *Checkpoint) GetPath() string { + if x != nil { + return x.Path + } + return "" +} + +func (x *Checkpoint) GetPrimaryMetric() *PrimaryMetric { + if x != nil { + return x.PrimaryMetric + } + return nil +} + +type PrimaryMetric struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Goal PrimaryMetric_Goal `protobuf:"varint,2,opt,name=goal,proto3,enum=service.PrimaryMetric_Goal" json:"goal,omitempty"` +} + +func (x *PrimaryMetric) Reset() { + *x = PrimaryMetric{} + if protoimpl.UnsafeEnabled { + mi := &file_replicate_proto_msgTypes[21] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } 
+} + +func (x *PrimaryMetric) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PrimaryMetric) ProtoMessage() {} + +func (x *PrimaryMetric) ProtoReflect() protoreflect.Message { + mi := &file_replicate_proto_msgTypes[21] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PrimaryMetric.ProtoReflect.Descriptor instead. +func (*PrimaryMetric) Descriptor() ([]byte, []int) { + return file_replicate_proto_rawDescGZIP(), []int{21} +} + +func (x *PrimaryMetric) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *PrimaryMetric) GetGoal() PrimaryMetric_Goal { + if x != nil { + return x.Goal + } + return PrimaryMetric_MAXIMIZE +} + +type ParamType struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Types that are assignable to Value: + // *ParamType_BoolValue + // *ParamType_IntValue + // *ParamType_FloatValue + // *ParamType_StringValue + // *ParamType_ObjectValueJson + Value isParamType_Value `protobuf_oneof:"value"` +} + +func (x *ParamType) Reset() { + *x = ParamType{} + if protoimpl.UnsafeEnabled { + mi := &file_replicate_proto_msgTypes[22] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ParamType) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ParamType) ProtoMessage() {} + +func (x *ParamType) ProtoReflect() protoreflect.Message { + mi := &file_replicate_proto_msgTypes[22] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ParamType.ProtoReflect.Descriptor instead. 
+func (*ParamType) Descriptor() ([]byte, []int) { + return file_replicate_proto_rawDescGZIP(), []int{22} +} + +func (m *ParamType) GetValue() isParamType_Value { + if m != nil { + return m.Value + } + return nil +} + +func (x *ParamType) GetBoolValue() bool { + if x, ok := x.GetValue().(*ParamType_BoolValue); ok { + return x.BoolValue + } + return false +} + +func (x *ParamType) GetIntValue() int64 { + if x, ok := x.GetValue().(*ParamType_IntValue); ok { + return x.IntValue + } + return 0 +} + +func (x *ParamType) GetFloatValue() float64 { + if x, ok := x.GetValue().(*ParamType_FloatValue); ok { + return x.FloatValue + } + return 0 +} + +func (x *ParamType) GetStringValue() string { + if x, ok := x.GetValue().(*ParamType_StringValue); ok { + return x.StringValue + } + return "" +} + +func (x *ParamType) GetObjectValueJson() string { + if x, ok := x.GetValue().(*ParamType_ObjectValueJson); ok { + return x.ObjectValueJson + } + return "" +} + +type isParamType_Value interface { + isParamType_Value() +} + +type ParamType_BoolValue struct { + BoolValue bool `protobuf:"varint,1,opt,name=boolValue,proto3,oneof"` +} + +type ParamType_IntValue struct { + IntValue int64 `protobuf:"varint,2,opt,name=intValue,proto3,oneof"` +} + +type ParamType_FloatValue struct { + FloatValue float64 `protobuf:"fixed64,3,opt,name=floatValue,proto3,oneof"` +} + +type ParamType_StringValue struct { + StringValue string `protobuf:"bytes,4,opt,name=stringValue,proto3,oneof"` +} + +type ParamType_ObjectValueJson struct { + ObjectValueJson string `protobuf:"bytes,5,opt,name=objectValueJson,proto3,oneof"` +} + +func (*ParamType_BoolValue) isParamType_Value() {} + +func (*ParamType_IntValue) isParamType_Value() {} + +func (*ParamType_FloatValue) isParamType_Value() {} + +func (*ParamType_StringValue) isParamType_Value() {} + +func (*ParamType_ObjectValueJson) isParamType_Value() {} + +var File_replicate_proto protoreflect.FileDescriptor + +var file_replicate_proto_rawDesc = []byte{ + 0x0a, 0x0f, 
0x72, 0x65, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x12, 0x07, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, + 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, 0x6d, 0x65, + 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x90, 0x01, 0x0a, 0x17, + 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x33, 0x0a, 0x0a, 0x65, 0x78, 0x70, 0x65, 0x72, + 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x73, 0x65, + 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, + 0x52, 0x0a, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x2a, 0x0a, 0x10, + 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x48, 0x65, 0x61, 0x72, 0x74, 0x62, 0x65, 0x61, 0x74, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x10, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x48, + 0x65, 0x61, 0x72, 0x74, 0x62, 0x65, 0x61, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x71, 0x75, 0x69, 0x65, + 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x05, 0x71, 0x75, 0x69, 0x65, 0x74, 0x22, 0x4c, + 0x0a, 0x15, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, + 0x6e, 0x74, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x33, 0x0a, 0x0a, 0x65, 0x78, 0x70, 0x65, 0x72, + 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x73, 0x65, + 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, + 0x52, 0x0a, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x22, 0x64, 0x0a, 0x17, + 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x33, 0x0a, 0x0a, 0x63, 0x68, 0x65, 0x63, 0x6b, + 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x18, 0x01, 0x20, 
0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x73, 0x65, + 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, + 0x52, 0x0a, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x12, 0x14, 0x0a, 0x05, + 0x71, 0x75, 0x69, 0x65, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x05, 0x71, 0x75, 0x69, + 0x65, 0x74, 0x22, 0x4c, 0x0a, 0x15, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x43, 0x68, 0x65, 0x63, + 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x33, 0x0a, 0x0a, 0x63, + 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x13, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, + 0x6f, 0x69, 0x6e, 0x74, 0x52, 0x0a, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, + 0x22, 0x62, 0x0a, 0x15, 0x53, 0x61, 0x76, 0x65, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, + 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x33, 0x0a, 0x0a, 0x65, 0x78, 0x70, + 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, + 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, + 0x6e, 0x74, 0x52, 0x0a, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x14, + 0x0a, 0x05, 0x71, 0x75, 0x69, 0x65, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x05, 0x71, + 0x75, 0x69, 0x65, 0x74, 0x22, 0x4a, 0x0a, 0x13, 0x53, 0x61, 0x76, 0x65, 0x45, 0x78, 0x70, 0x65, + 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x33, 0x0a, 0x0a, 0x65, + 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x13, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, + 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x0a, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, + 0x22, 0x3b, 0x0a, 0x15, 0x53, 0x74, 0x6f, 0x70, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 
0x6d, 0x65, + 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x22, 0x0a, 0x0c, 0x65, 0x78, 0x70, + 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x0c, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x49, 0x44, 0x22, 0x15, 0x0a, + 0x13, 0x53, 0x74, 0x6f, 0x70, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x52, + 0x65, 0x70, 0x6c, 0x79, 0x22, 0x46, 0x0a, 0x14, 0x47, 0x65, 0x74, 0x45, 0x78, 0x70, 0x65, 0x72, + 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2e, 0x0a, 0x12, + 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x49, 0x44, 0x50, 0x72, 0x65, 0x66, + 0x69, 0x78, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x12, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, + 0x6d, 0x65, 0x6e, 0x74, 0x49, 0x44, 0x50, 0x72, 0x65, 0x66, 0x69, 0x78, 0x22, 0x49, 0x0a, 0x12, + 0x47, 0x65, 0x74, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x70, + 0x6c, 0x79, 0x12, 0x33, 0x0a, 0x0a, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, + 0x2e, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x0a, 0x65, 0x78, 0x70, + 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x22, 0x18, 0x0a, 0x16, 0x4c, 0x69, 0x73, 0x74, 0x45, + 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x22, 0x4d, 0x0a, 0x14, 0x4c, 0x69, 0x73, 0x74, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, + 0x65, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x35, 0x0a, 0x0b, 0x65, 0x78, 0x70, + 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, + 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, + 0x65, 0x6e, 0x74, 0x52, 0x0b, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x73, + 0x22, 0x3d, 0x0a, 0x17, 
0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, + 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x22, 0x0a, 0x0c, 0x65, + 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x0c, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x49, 0x44, 0x22, + 0x17, 0x0a, 0x15, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, + 0x65, 0x6e, 0x74, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x8b, 0x01, 0x0a, 0x19, 0x43, 0x68, 0x65, + 0x63, 0x6b, 0x6f, 0x75, 0x74, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2e, 0x0a, 0x12, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x70, + 0x6f, 0x69, 0x6e, 0x74, 0x49, 0x44, 0x50, 0x72, 0x65, 0x66, 0x69, 0x78, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x12, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x49, 0x44, + 0x50, 0x72, 0x65, 0x66, 0x69, 0x78, 0x12, 0x28, 0x0a, 0x0f, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, + 0x44, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x0f, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x44, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, + 0x12, 0x14, 0x0a, 0x05, 0x71, 0x75, 0x69, 0x65, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, + 0x05, 0x71, 0x75, 0x69, 0x65, 0x74, 0x22, 0x19, 0x0a, 0x17, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x6f, + 0x75, 0x74, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x52, 0x65, 0x70, 0x6c, + 0x79, 0x22, 0x40, 0x0a, 0x1a, 0x47, 0x65, 0x74, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, + 0x6e, 0x74, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, + 0x22, 0x0a, 0x0c, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x49, 0x44, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x65, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, + 0x74, 0x49, 0x44, 0x22, 0x80, 0x01, 0x0a, 0x18, 0x47, 0x65, 
0x74, 0x45, 0x78, 0x70, 0x65, 0x72, + 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x70, 0x6c, 0x79, + 0x12, 0x40, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, + 0x32, 0x28, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x47, 0x65, 0x74, 0x45, 0x78, + 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, + 0x70, 0x6c, 0x79, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, + 0x75, 0x73, 0x22, 0x22, 0x0a, 0x06, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x0b, 0x0a, 0x07, + 0x52, 0x55, 0x4e, 0x4e, 0x49, 0x4e, 0x47, 0x10, 0x00, 0x12, 0x0b, 0x0a, 0x07, 0x53, 0x54, 0x4f, + 0x50, 0x50, 0x45, 0x44, 0x10, 0x01, 0x22, 0xf6, 0x04, 0x0a, 0x0a, 0x45, 0x78, 0x70, 0x65, 0x72, + 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x34, 0x0a, 0x07, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, + 0x6d, 0x70, 0x52, 0x07, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x12, 0x37, 0x0a, 0x06, 0x70, + 0x61, 0x72, 0x61, 0x6d, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x73, 0x65, + 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, + 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x06, 0x70, 0x61, + 0x72, 0x61, 0x6d, 0x73, 0x12, 0x12, 0x0a, 0x04, 0x68, 0x6f, 0x73, 0x74, 0x18, 0x04, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x04, 0x68, 0x6f, 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x75, 0x73, 0x65, 0x72, + 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x75, 0x73, 0x65, 0x72, 0x12, 0x27, 0x0a, 0x06, + 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x73, + 
0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x06, 0x63, + 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, + 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x12, + 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x70, + 0x61, 0x74, 0x68, 0x12, 0x4f, 0x0a, 0x0e, 0x70, 0x79, 0x74, 0x68, 0x6f, 0x6e, 0x50, 0x61, 0x63, + 0x6b, 0x61, 0x67, 0x65, 0x73, 0x18, 0x09, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x73, 0x65, + 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, + 0x2e, 0x50, 0x79, 0x74, 0x68, 0x6f, 0x6e, 0x50, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x73, 0x45, + 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0e, 0x70, 0x79, 0x74, 0x68, 0x6f, 0x6e, 0x50, 0x61, 0x63, 0x6b, + 0x61, 0x67, 0x65, 0x73, 0x12, 0x24, 0x0a, 0x0d, 0x70, 0x79, 0x74, 0x68, 0x6f, 0x6e, 0x56, 0x65, + 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x70, 0x79, 0x74, + 0x68, 0x6f, 0x6e, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x35, 0x0a, 0x0b, 0x63, 0x68, + 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x73, 0x18, 0x0b, 0x20, 0x03, 0x28, 0x0b, 0x32, + 0x13, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, + 0x6f, 0x69, 0x6e, 0x74, 0x52, 0x0b, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, + 0x73, 0x12, 0x2a, 0x0a, 0x10, 0x72, 0x65, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x65, 0x56, 0x65, + 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x72, 0x65, 0x70, + 0x6c, 0x69, 0x63, 0x61, 0x74, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x1a, 0x4d, 0x0a, + 0x0b, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, + 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x28, + 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 
0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, + 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x54, 0x79, 0x70, + 0x65, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x41, 0x0a, 0x13, + 0x50, 0x79, 0x74, 0x68, 0x6f, 0x6e, 0x50, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x73, 0x45, 0x6e, + 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, + 0x42, 0x0a, 0x06, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x1e, 0x0a, 0x0a, 0x72, 0x65, 0x70, + 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x72, + 0x65, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x6f, 0x72, 0x79, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x74, 0x6f, + 0x72, 0x61, 0x67, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x73, 0x74, 0x6f, 0x72, + 0x61, 0x67, 0x65, 0x22, 0xc4, 0x02, 0x0a, 0x0a, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, + 0x6e, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, + 0x69, 0x64, 0x12, 0x34, 0x0a, 0x07, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, + 0x07, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x12, 0x3a, 0x0a, 0x07, 0x6d, 0x65, 0x74, 0x72, + 0x69, 0x63, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x73, 0x65, 0x72, 0x76, + 0x69, 0x63, 0x65, 0x2e, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x2e, 0x4d, + 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x07, 0x6d, 0x65, 0x74, + 0x72, 0x69, 0x63, 0x73, 0x12, 0x12, 0x0a, 0x04, 0x73, 0x74, 0x65, 0x70, 
0x18, 0x04, 0x20, 0x01, + 0x28, 0x03, 0x52, 0x04, 0x73, 0x74, 0x65, 0x70, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, + 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x3c, 0x0a, 0x0d, + 0x70, 0x72, 0x69, 0x6d, 0x61, 0x72, 0x79, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x18, 0x06, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x50, 0x72, + 0x69, 0x6d, 0x61, 0x72, 0x79, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x52, 0x0d, 0x70, 0x72, 0x69, + 0x6d, 0x61, 0x72, 0x79, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x1a, 0x4e, 0x0a, 0x0c, 0x4d, 0x65, + 0x74, 0x72, 0x69, 0x63, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, + 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x28, 0x0a, 0x05, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x73, 0x65, + 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x54, 0x79, 0x70, 0x65, 0x52, + 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x78, 0x0a, 0x0d, 0x50, 0x72, + 0x69, 0x6d, 0x61, 0x72, 0x79, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x12, 0x12, 0x0a, 0x04, 0x6e, + 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, + 0x2f, 0x0a, 0x04, 0x67, 0x6f, 0x61, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1b, 0x2e, + 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x50, 0x72, 0x69, 0x6d, 0x61, 0x72, 0x79, 0x4d, + 0x65, 0x74, 0x72, 0x69, 0x63, 0x2e, 0x47, 0x6f, 0x61, 0x6c, 0x52, 0x04, 0x67, 0x6f, 0x61, 0x6c, + 0x22, 0x22, 0x0a, 0x04, 0x47, 0x6f, 0x61, 0x6c, 0x12, 0x0c, 0x0a, 0x08, 0x4d, 0x41, 0x58, 0x49, + 0x4d, 0x49, 0x5a, 0x45, 0x10, 0x00, 0x12, 0x0c, 0x0a, 0x08, 0x4d, 0x49, 0x4e, 0x49, 0x4d, 0x49, + 0x5a, 0x45, 0x10, 0x01, 0x22, 0xc4, 0x01, 0x0a, 0x09, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x54, 0x79, + 0x70, 0x65, 0x12, 0x1e, 0x0a, 0x09, 0x62, 0x6f, 0x6f, 0x6c, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x18, + 0x01, 0x20, 
0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x09, 0x62, 0x6f, 0x6f, 0x6c, 0x56, 0x61, 0x6c, + 0x75, 0x65, 0x12, 0x1c, 0x0a, 0x08, 0x69, 0x6e, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x03, 0x48, 0x00, 0x52, 0x08, 0x69, 0x6e, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, + 0x12, 0x20, 0x0a, 0x0a, 0x66, 0x6c, 0x6f, 0x61, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x03, + 0x20, 0x01, 0x28, 0x01, 0x48, 0x00, 0x52, 0x0a, 0x66, 0x6c, 0x6f, 0x61, 0x74, 0x56, 0x61, 0x6c, + 0x75, 0x65, 0x12, 0x22, 0x0a, 0x0b, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x56, 0x61, 0x6c, 0x75, + 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0b, 0x73, 0x74, 0x72, 0x69, 0x6e, + 0x67, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x2a, 0x0a, 0x0f, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, + 0x56, 0x61, 0x6c, 0x75, 0x65, 0x4a, 0x73, 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x48, + 0x00, 0x52, 0x0f, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x4a, 0x73, + 0x6f, 0x6e, 0x42, 0x07, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x32, 0x97, 0x06, 0x0a, 0x06, + 0x44, 0x61, 0x65, 0x6d, 0x6f, 0x6e, 0x12, 0x56, 0x0a, 0x10, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, + 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x20, 0x2e, 0x73, 0x65, 0x72, + 0x76, 0x69, 0x63, 0x65, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x45, 0x78, 0x70, 0x65, 0x72, + 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x73, + 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x45, 0x78, 0x70, + 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x56, + 0x0a, 0x10, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, + 0x6e, 0x74, 0x12, 0x20, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x43, 0x72, 0x65, + 0x61, 0x74, 0x65, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x73, 
0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x43, + 0x72, 0x65, 0x61, 0x74, 0x65, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x52, + 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x50, 0x0a, 0x0e, 0x53, 0x61, 0x76, 0x65, 0x45, 0x78, + 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x1e, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, + 0x63, 0x65, 0x2e, 0x53, 0x61, 0x76, 0x65, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, + 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1c, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, + 0x63, 0x65, 0x2e, 0x53, 0x61, 0x76, 0x65, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, + 0x74, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x50, 0x0a, 0x0e, 0x53, 0x74, 0x6f, 0x70, + 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x1e, 0x2e, 0x73, 0x65, 0x72, + 0x76, 0x69, 0x63, 0x65, 0x2e, 0x53, 0x74, 0x6f, 0x70, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, + 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1c, 0x2e, 0x73, 0x65, 0x72, + 0x76, 0x69, 0x63, 0x65, 0x2e, 0x53, 0x74, 0x6f, 0x70, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, + 0x65, 0x6e, 0x74, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x4d, 0x0a, 0x0d, 0x47, 0x65, + 0x74, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x1d, 0x2e, 0x73, 0x65, + 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x47, 0x65, 0x74, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, + 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1b, 0x2e, 0x73, 0x65, 0x72, + 0x76, 0x69, 0x63, 0x65, 0x2e, 0x47, 0x65, 0x74, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, + 0x6e, 0x74, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x53, 0x0a, 0x0f, 0x4c, 0x69, 0x73, + 0x74, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x1f, 0x2e, 0x73, + 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x45, 0x78, 0x70, 0x65, 0x72, + 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 
0x1d, 0x2e, + 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x45, 0x78, 0x70, 0x65, + 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x56, + 0x0a, 0x10, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, + 0x6e, 0x74, 0x12, 0x20, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x44, 0x65, 0x6c, + 0x65, 0x74, 0x65, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x44, + 0x65, 0x6c, 0x65, 0x74, 0x65, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x52, + 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x5c, 0x0a, 0x12, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x6f, + 0x75, 0x74, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x12, 0x22, 0x2e, 0x73, + 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x6f, 0x75, 0x74, 0x43, + 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x1a, 0x20, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x43, 0x68, 0x65, 0x63, 0x6b, + 0x6f, 0x75, 0x74, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x52, 0x65, 0x70, + 0x6c, 0x79, 0x22, 0x00, 0x12, 0x5f, 0x0a, 0x13, 0x47, 0x65, 0x74, 0x45, 0x78, 0x70, 0x65, 0x72, + 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x23, 0x2e, 0x73, 0x65, + 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x47, 0x65, 0x74, 0x45, 0x78, 0x70, 0x65, 0x72, 0x69, 0x6d, + 0x65, 0x6e, 0x74, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x1a, 0x21, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x47, 0x65, 0x74, 0x45, 0x78, + 0x70, 0x65, 0x72, 0x69, 0x6d, 0x65, 0x6e, 0x74, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, + 0x70, 0x6c, 0x79, 0x22, 0x00, 0x42, 0x31, 0x5a, 0x2f, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, + 0x63, 0x6f, 0x6d, 0x2f, 
0x72, 0x65, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x65, 0x2f, 0x72, 0x65, + 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x65, 0x2f, 0x67, 0x6f, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x73, + 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_replicate_proto_rawDescOnce sync.Once + file_replicate_proto_rawDescData = file_replicate_proto_rawDesc +) + +func file_replicate_proto_rawDescGZIP() []byte { + file_replicate_proto_rawDescOnce.Do(func() { + file_replicate_proto_rawDescData = protoimpl.X.CompressGZIP(file_replicate_proto_rawDescData) + }) + return file_replicate_proto_rawDescData +} + +var file_replicate_proto_enumTypes = make([]protoimpl.EnumInfo, 2) +var file_replicate_proto_msgTypes = make([]protoimpl.MessageInfo, 26) +var file_replicate_proto_goTypes = []interface{}{ + (GetExperimentStatusReply_Status)(0), // 0: service.GetExperimentStatusReply.Status + (PrimaryMetric_Goal)(0), // 1: service.PrimaryMetric.Goal + (*CreateExperimentRequest)(nil), // 2: service.CreateExperimentRequest + (*CreateExperimentReply)(nil), // 3: service.CreateExperimentReply + (*CreateCheckpointRequest)(nil), // 4: service.CreateCheckpointRequest + (*CreateCheckpointReply)(nil), // 5: service.CreateCheckpointReply + (*SaveExperimentRequest)(nil), // 6: service.SaveExperimentRequest + (*SaveExperimentReply)(nil), // 7: service.SaveExperimentReply + (*StopExperimentRequest)(nil), // 8: service.StopExperimentRequest + (*StopExperimentReply)(nil), // 9: service.StopExperimentReply + (*GetExperimentRequest)(nil), // 10: service.GetExperimentRequest + (*GetExperimentReply)(nil), // 11: service.GetExperimentReply + (*ListExperimentsRequest)(nil), // 12: service.ListExperimentsRequest + (*ListExperimentsReply)(nil), // 13: service.ListExperimentsReply + (*DeleteExperimentRequest)(nil), // 14: service.DeleteExperimentRequest + (*DeleteExperimentReply)(nil), // 15: service.DeleteExperimentReply + (*CheckoutCheckpointRequest)(nil), // 16: 
service.CheckoutCheckpointRequest + (*CheckoutCheckpointReply)(nil), // 17: service.CheckoutCheckpointReply + (*GetExperimentStatusRequest)(nil), // 18: service.GetExperimentStatusRequest + (*GetExperimentStatusReply)(nil), // 19: service.GetExperimentStatusReply + (*Experiment)(nil), // 20: service.Experiment + (*Config)(nil), // 21: service.Config + (*Checkpoint)(nil), // 22: service.Checkpoint + (*PrimaryMetric)(nil), // 23: service.PrimaryMetric + (*ParamType)(nil), // 24: service.ParamType + nil, // 25: service.Experiment.ParamsEntry + nil, // 26: service.Experiment.PythonPackagesEntry + nil, // 27: service.Checkpoint.MetricsEntry + (*timestamppb.Timestamp)(nil), // 28: google.protobuf.Timestamp +} +var file_replicate_proto_depIdxs = []int32{ + 20, // 0: service.CreateExperimentRequest.experiment:type_name -> service.Experiment + 20, // 1: service.CreateExperimentReply.experiment:type_name -> service.Experiment + 22, // 2: service.CreateCheckpointRequest.checkpoint:type_name -> service.Checkpoint + 22, // 3: service.CreateCheckpointReply.checkpoint:type_name -> service.Checkpoint + 20, // 4: service.SaveExperimentRequest.experiment:type_name -> service.Experiment + 20, // 5: service.SaveExperimentReply.experiment:type_name -> service.Experiment + 20, // 6: service.GetExperimentReply.experiment:type_name -> service.Experiment + 20, // 7: service.ListExperimentsReply.experiments:type_name -> service.Experiment + 0, // 8: service.GetExperimentStatusReply.status:type_name -> service.GetExperimentStatusReply.Status + 28, // 9: service.Experiment.created:type_name -> google.protobuf.Timestamp + 25, // 10: service.Experiment.params:type_name -> service.Experiment.ParamsEntry + 21, // 11: service.Experiment.config:type_name -> service.Config + 26, // 12: service.Experiment.pythonPackages:type_name -> service.Experiment.PythonPackagesEntry + 22, // 13: service.Experiment.checkpoints:type_name -> service.Checkpoint + 28, // 14: service.Checkpoint.created:type_name -> 
google.protobuf.Timestamp + 27, // 15: service.Checkpoint.metrics:type_name -> service.Checkpoint.MetricsEntry + 23, // 16: service.Checkpoint.primaryMetric:type_name -> service.PrimaryMetric + 1, // 17: service.PrimaryMetric.goal:type_name -> service.PrimaryMetric.Goal + 24, // 18: service.Experiment.ParamsEntry.value:type_name -> service.ParamType + 24, // 19: service.Checkpoint.MetricsEntry.value:type_name -> service.ParamType + 2, // 20: service.Daemon.CreateExperiment:input_type -> service.CreateExperimentRequest + 4, // 21: service.Daemon.CreateCheckpoint:input_type -> service.CreateCheckpointRequest + 6, // 22: service.Daemon.SaveExperiment:input_type -> service.SaveExperimentRequest + 8, // 23: service.Daemon.StopExperiment:input_type -> service.StopExperimentRequest + 10, // 24: service.Daemon.GetExperiment:input_type -> service.GetExperimentRequest + 12, // 25: service.Daemon.ListExperiments:input_type -> service.ListExperimentsRequest + 14, // 26: service.Daemon.DeleteExperiment:input_type -> service.DeleteExperimentRequest + 16, // 27: service.Daemon.CheckoutCheckpoint:input_type -> service.CheckoutCheckpointRequest + 18, // 28: service.Daemon.GetExperimentStatus:input_type -> service.GetExperimentStatusRequest + 3, // 29: service.Daemon.CreateExperiment:output_type -> service.CreateExperimentReply + 5, // 30: service.Daemon.CreateCheckpoint:output_type -> service.CreateCheckpointReply + 7, // 31: service.Daemon.SaveExperiment:output_type -> service.SaveExperimentReply + 9, // 32: service.Daemon.StopExperiment:output_type -> service.StopExperimentReply + 11, // 33: service.Daemon.GetExperiment:output_type -> service.GetExperimentReply + 13, // 34: service.Daemon.ListExperiments:output_type -> service.ListExperimentsReply + 15, // 35: service.Daemon.DeleteExperiment:output_type -> service.DeleteExperimentReply + 17, // 36: service.Daemon.CheckoutCheckpoint:output_type -> service.CheckoutCheckpointReply + 19, // 37: 
service.Daemon.GetExperimentStatus:output_type -> service.GetExperimentStatusReply + 29, // [29:38] is the sub-list for method output_type + 20, // [20:29] is the sub-list for method input_type + 20, // [20:20] is the sub-list for extension type_name + 20, // [20:20] is the sub-list for extension extendee + 0, // [0:20] is the sub-list for field type_name +} + +func init() { file_replicate_proto_init() } +func file_replicate_proto_init() { + if File_replicate_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_replicate_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CreateExperimentRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_replicate_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CreateExperimentReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_replicate_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CreateCheckpointRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_replicate_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CreateCheckpointReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_replicate_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SaveExperimentRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_replicate_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SaveExperimentReply); i { + case 0: + return &v.state + case 
1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_replicate_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*StopExperimentRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_replicate_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*StopExperimentReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_replicate_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GetExperimentRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_replicate_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GetExperimentReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_replicate_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ListExperimentsRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_replicate_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ListExperimentsReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_replicate_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DeleteExperimentRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_replicate_proto_msgTypes[13].Exporter = func(v interface{}, i int) 
interface{} { + switch v := v.(*DeleteExperimentReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_replicate_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CheckoutCheckpointRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_replicate_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CheckoutCheckpointReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_replicate_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GetExperimentStatusRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_replicate_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GetExperimentStatusReply); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_replicate_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Experiment); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_replicate_proto_msgTypes[19].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Config); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_replicate_proto_msgTypes[20].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Checkpoint); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } 
+ file_replicate_proto_msgTypes[21].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*PrimaryMetric); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_replicate_proto_msgTypes[22].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ParamType); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + file_replicate_proto_msgTypes[22].OneofWrappers = []interface{}{ + (*ParamType_BoolValue)(nil), + (*ParamType_IntValue)(nil), + (*ParamType_FloatValue)(nil), + (*ParamType_StringValue)(nil), + (*ParamType_ObjectValueJson)(nil), + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_replicate_proto_rawDesc, + NumEnums: 2, + NumMessages: 26, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_replicate_proto_goTypes, + DependencyIndexes: file_replicate_proto_depIdxs, + EnumInfos: file_replicate_proto_enumTypes, + MessageInfos: file_replicate_proto_msgTypes, + }.Build() + File_replicate_proto = out.File + file_replicate_proto_rawDesc = nil + file_replicate_proto_goTypes = nil + file_replicate_proto_depIdxs = nil +} diff --git a/go/pkg/servicepb/replicate_grpc.pb.go b/go/pkg/servicepb/replicate_grpc.pb.go new file mode 100644 index 00000000..59a12554 --- /dev/null +++ b/go/pkg/servicepb/replicate_grpc.pb.go @@ -0,0 +1,386 @@ +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. + +package servicepb + +import ( + context "context" + + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. 
+const _ = grpc.SupportPackageIsVersion7 + +// DaemonClient is the client API for Daemon service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. +type DaemonClient interface { + CreateExperiment(ctx context.Context, in *CreateExperimentRequest, opts ...grpc.CallOption) (*CreateExperimentReply, error) + CreateCheckpoint(ctx context.Context, in *CreateCheckpointRequest, opts ...grpc.CallOption) (*CreateCheckpointReply, error) + SaveExperiment(ctx context.Context, in *SaveExperimentRequest, opts ...grpc.CallOption) (*SaveExperimentReply, error) + StopExperiment(ctx context.Context, in *StopExperimentRequest, opts ...grpc.CallOption) (*StopExperimentReply, error) + GetExperiment(ctx context.Context, in *GetExperimentRequest, opts ...grpc.CallOption) (*GetExperimentReply, error) + ListExperiments(ctx context.Context, in *ListExperimentsRequest, opts ...grpc.CallOption) (*ListExperimentsReply, error) + DeleteExperiment(ctx context.Context, in *DeleteExperimentRequest, opts ...grpc.CallOption) (*DeleteExperimentReply, error) + CheckoutCheckpoint(ctx context.Context, in *CheckoutCheckpointRequest, opts ...grpc.CallOption) (*CheckoutCheckpointReply, error) + GetExperimentStatus(ctx context.Context, in *GetExperimentStatusRequest, opts ...grpc.CallOption) (*GetExperimentStatusReply, error) +} + +type daemonClient struct { + cc grpc.ClientConnInterface +} + +func NewDaemonClient(cc grpc.ClientConnInterface) DaemonClient { + return &daemonClient{cc} +} + +func (c *daemonClient) CreateExperiment(ctx context.Context, in *CreateExperimentRequest, opts ...grpc.CallOption) (*CreateExperimentReply, error) { + out := new(CreateExperimentReply) + err := c.cc.Invoke(ctx, "/service.Daemon/CreateExperiment", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *daemonClient) CreateCheckpoint(ctx context.Context, in *CreateCheckpointRequest, opts ...grpc.CallOption) (*CreateCheckpointReply, error) { + out := new(CreateCheckpointReply) + err := c.cc.Invoke(ctx, "/service.Daemon/CreateCheckpoint", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *daemonClient) SaveExperiment(ctx context.Context, in *SaveExperimentRequest, opts ...grpc.CallOption) (*SaveExperimentReply, error) { + out := new(SaveExperimentReply) + err := c.cc.Invoke(ctx, "/service.Daemon/SaveExperiment", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *daemonClient) StopExperiment(ctx context.Context, in *StopExperimentRequest, opts ...grpc.CallOption) (*StopExperimentReply, error) { + out := new(StopExperimentReply) + err := c.cc.Invoke(ctx, "/service.Daemon/StopExperiment", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *daemonClient) GetExperiment(ctx context.Context, in *GetExperimentRequest, opts ...grpc.CallOption) (*GetExperimentReply, error) { + out := new(GetExperimentReply) + err := c.cc.Invoke(ctx, "/service.Daemon/GetExperiment", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *daemonClient) ListExperiments(ctx context.Context, in *ListExperimentsRequest, opts ...grpc.CallOption) (*ListExperimentsReply, error) { + out := new(ListExperimentsReply) + err := c.cc.Invoke(ctx, "/service.Daemon/ListExperiments", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *daemonClient) DeleteExperiment(ctx context.Context, in *DeleteExperimentRequest, opts ...grpc.CallOption) (*DeleteExperimentReply, error) { + out := new(DeleteExperimentReply) + err := c.cc.Invoke(ctx, "/service.Daemon/DeleteExperiment", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *daemonClient) CheckoutCheckpoint(ctx context.Context, in *CheckoutCheckpointRequest, opts ...grpc.CallOption) (*CheckoutCheckpointReply, error) { + out := new(CheckoutCheckpointReply) + err := c.cc.Invoke(ctx, "/service.Daemon/CheckoutCheckpoint", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *daemonClient) GetExperimentStatus(ctx context.Context, in *GetExperimentStatusRequest, opts ...grpc.CallOption) (*GetExperimentStatusReply, error) { + out := new(GetExperimentStatusReply) + err := c.cc.Invoke(ctx, "/service.Daemon/GetExperimentStatus", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// DaemonServer is the server API for Daemon service. +// All implementations must embed UnimplementedDaemonServer +// for forward compatibility +type DaemonServer interface { + CreateExperiment(context.Context, *CreateExperimentRequest) (*CreateExperimentReply, error) + CreateCheckpoint(context.Context, *CreateCheckpointRequest) (*CreateCheckpointReply, error) + SaveExperiment(context.Context, *SaveExperimentRequest) (*SaveExperimentReply, error) + StopExperiment(context.Context, *StopExperimentRequest) (*StopExperimentReply, error) + GetExperiment(context.Context, *GetExperimentRequest) (*GetExperimentReply, error) + ListExperiments(context.Context, *ListExperimentsRequest) (*ListExperimentsReply, error) + DeleteExperiment(context.Context, *DeleteExperimentRequest) (*DeleteExperimentReply, error) + CheckoutCheckpoint(context.Context, *CheckoutCheckpointRequest) (*CheckoutCheckpointReply, error) + GetExperimentStatus(context.Context, *GetExperimentStatusRequest) (*GetExperimentStatusReply, error) + mustEmbedUnimplementedDaemonServer() +} + +// UnimplementedDaemonServer must be embedded to have forward compatible implementations. 
+type UnimplementedDaemonServer struct { +} + +func (UnimplementedDaemonServer) CreateExperiment(context.Context, *CreateExperimentRequest) (*CreateExperimentReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method CreateExperiment not implemented") +} +func (UnimplementedDaemonServer) CreateCheckpoint(context.Context, *CreateCheckpointRequest) (*CreateCheckpointReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method CreateCheckpoint not implemented") +} +func (UnimplementedDaemonServer) SaveExperiment(context.Context, *SaveExperimentRequest) (*SaveExperimentReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method SaveExperiment not implemented") +} +func (UnimplementedDaemonServer) StopExperiment(context.Context, *StopExperimentRequest) (*StopExperimentReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method StopExperiment not implemented") +} +func (UnimplementedDaemonServer) GetExperiment(context.Context, *GetExperimentRequest) (*GetExperimentReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetExperiment not implemented") +} +func (UnimplementedDaemonServer) ListExperiments(context.Context, *ListExperimentsRequest) (*ListExperimentsReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method ListExperiments not implemented") +} +func (UnimplementedDaemonServer) DeleteExperiment(context.Context, *DeleteExperimentRequest) (*DeleteExperimentReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method DeleteExperiment not implemented") +} +func (UnimplementedDaemonServer) CheckoutCheckpoint(context.Context, *CheckoutCheckpointRequest) (*CheckoutCheckpointReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method CheckoutCheckpoint not implemented") +} +func (UnimplementedDaemonServer) GetExperimentStatus(context.Context, *GetExperimentStatusRequest) (*GetExperimentStatusReply, error) { + return nil, status.Errorf(codes.Unimplemented, "method 
GetExperimentStatus not implemented") +} +func (UnimplementedDaemonServer) mustEmbedUnimplementedDaemonServer() {} + +// UnsafeDaemonServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to DaemonServer will +// result in compilation errors. +type UnsafeDaemonServer interface { + mustEmbedUnimplementedDaemonServer() +} + +func RegisterDaemonServer(s grpc.ServiceRegistrar, srv DaemonServer) { + s.RegisterService(&_Daemon_serviceDesc, srv) +} + +func _Daemon_CreateExperiment_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateExperimentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DaemonServer).CreateExperiment(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/service.Daemon/CreateExperiment", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DaemonServer).CreateExperiment(ctx, req.(*CreateExperimentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Daemon_CreateCheckpoint_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateCheckpointRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DaemonServer).CreateCheckpoint(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/service.Daemon/CreateCheckpoint", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DaemonServer).CreateCheckpoint(ctx, req.(*CreateCheckpointRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Daemon_SaveExperiment_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor 
grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SaveExperimentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DaemonServer).SaveExperiment(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/service.Daemon/SaveExperiment", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DaemonServer).SaveExperiment(ctx, req.(*SaveExperimentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Daemon_StopExperiment_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(StopExperimentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DaemonServer).StopExperiment(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/service.Daemon/StopExperiment", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DaemonServer).StopExperiment(ctx, req.(*StopExperimentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Daemon_GetExperiment_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetExperimentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DaemonServer).GetExperiment(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/service.Daemon/GetExperiment", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DaemonServer).GetExperiment(ctx, req.(*GetExperimentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Daemon_ListExperiments_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, 
error) { + in := new(ListExperimentsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DaemonServer).ListExperiments(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/service.Daemon/ListExperiments", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DaemonServer).ListExperiments(ctx, req.(*ListExperimentsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Daemon_DeleteExperiment_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteExperimentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DaemonServer).DeleteExperiment(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/service.Daemon/DeleteExperiment", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DaemonServer).DeleteExperiment(ctx, req.(*DeleteExperimentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Daemon_CheckoutCheckpoint_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CheckoutCheckpointRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DaemonServer).CheckoutCheckpoint(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/service.Daemon/CheckoutCheckpoint", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DaemonServer).CheckoutCheckpoint(ctx, req.(*CheckoutCheckpointRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Daemon_GetExperimentStatus_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) 
(interface{}, error) { + in := new(GetExperimentStatusRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DaemonServer).GetExperimentStatus(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/service.Daemon/GetExperimentStatus", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DaemonServer).GetExperimentStatus(ctx, req.(*GetExperimentStatusRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _Daemon_serviceDesc = grpc.ServiceDesc{ + ServiceName: "service.Daemon", + HandlerType: (*DaemonServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateExperiment", + Handler: _Daemon_CreateExperiment_Handler, + }, + { + MethodName: "CreateCheckpoint", + Handler: _Daemon_CreateCheckpoint_Handler, + }, + { + MethodName: "SaveExperiment", + Handler: _Daemon_SaveExperiment_Handler, + }, + { + MethodName: "StopExperiment", + Handler: _Daemon_StopExperiment_Handler, + }, + { + MethodName: "GetExperiment", + Handler: _Daemon_GetExperiment_Handler, + }, + { + MethodName: "ListExperiments", + Handler: _Daemon_ListExperiments_Handler, + }, + { + MethodName: "DeleteExperiment", + Handler: _Daemon_DeleteExperiment_Handler, + }, + { + MethodName: "CheckoutCheckpoint", + Handler: _Daemon_CheckoutCheckpoint_Handler, + }, + { + MethodName: "GetExperimentStatus", + Handler: _Daemon_GetExperimentStatus_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "replicate.proto", +} diff --git a/go/pkg/shared/heartbeat.go b/go/pkg/shared/heartbeat.go new file mode 100644 index 00000000..0d9ff641 --- /dev/null +++ b/go/pkg/shared/heartbeat.go @@ -0,0 +1,46 @@ +package shared + +import ( + "time" + + "github.com/replicate/replicate/go/pkg/console" + "github.com/replicate/replicate/go/pkg/project" +) + +type HeartbeatProcess struct { + project *project.Project + experimentID string + ticker *time.Ticker + done chan struct{} +} + +func 
StartHeartbeat(proj *project.Project, experimentID string) *HeartbeatProcess { + h := &HeartbeatProcess{ + project: proj, + experimentID: experimentID, + ticker: time.NewTicker(5 * time.Second), + done: make(chan struct{}), + } + go func() { + for { + select { + case <-h.done: + return + case <-h.ticker.C: + h.Refresh() + } + } + }() + return h +} + +func (h *HeartbeatProcess) Refresh() { + if err := h.project.RefreshHeartbeat(h.experimentID); err != nil { + console.Error("Failed to refresh heartbeat: %v", err) + } +} + +func (h *HeartbeatProcess) Kill() { + h.ticker.Stop() + h.done <- struct{}{} +} diff --git a/go/pkg/shared/pb_convert.go b/go/pkg/shared/pb_convert.go new file mode 100644 index 00000000..92de6cf6 --- /dev/null +++ b/go/pkg/shared/pb_convert.go @@ -0,0 +1,217 @@ +package shared + +import ( + "fmt" + + "google.golang.org/protobuf/types/known/timestamppb" + + "github.com/replicate/replicate/go/pkg/config" + "github.com/replicate/replicate/go/pkg/param" + "github.com/replicate/replicate/go/pkg/project" + "github.com/replicate/replicate/go/pkg/servicepb" +) + +// convert from protobuf + +func checkpointsFromPb(checkpointsPb []*servicepb.Checkpoint) []*project.Checkpoint { + if checkpointsPb == nil { + return nil + } + ret := make([]*project.Checkpoint, len(checkpointsPb)) + for i, chkPb := range checkpointsPb { + ret[i] = checkpointFromPb(chkPb) + } + return ret +} + +func checkpointFromPb(chkPb *servicepb.Checkpoint) *project.Checkpoint { + return &project.Checkpoint{ + ID: chkPb.Id, + Created: chkPb.Created.AsTime(), + Metrics: valueMapFromPb(chkPb.Metrics), + Step: chkPb.Step, + Path: chkPb.Path, + PrimaryMetric: primaryMetricFromPb(chkPb.PrimaryMetric), + } +} + +func experimentFromPb(expPb *servicepb.Experiment) *project.Experiment { + return &project.Experiment{ + ID: expPb.Id, + Created: expPb.Created.AsTime(), + Params: valueMapFromPb(expPb.Params), + Host: expPb.Host, + User: expPb.User, + Config: configFromPb(expPb.Config), + Command: 
expPb.Command, + Path: expPb.Path, + PythonPackages: expPb.PythonPackages, + PythonVersion: expPb.PythonVersion, + Checkpoints: checkpointsFromPb(expPb.Checkpoints), + ReplicateVersion: expPb.ReplicateVersion, + } +} + +func configFromPb(confPb *servicepb.Config) *config.Config { + var conf *config.Config + if confPb != nil { + conf = &config.Config{Repository: confPb.Repository, Storage: confPb.Storage} + } + return conf +} + +func primaryMetricFromPb(pmPb *servicepb.PrimaryMetric) *project.PrimaryMetric { + if pmPb == nil { + return nil + } + var goal project.MetricGoal + switch pmPb.Goal { + case servicepb.PrimaryMetric_MAXIMIZE: + goal = project.GoalMaximize + case servicepb.PrimaryMetric_MINIMIZE: + goal = project.GoalMinimize + } + return &project.PrimaryMetric{ + Name: pmPb.Name, + Goal: goal, + } +} + +func valueMapFromPb(pb map[string]*servicepb.ParamType) map[string]param.Value { + if len(pb) == 0 { + return nil + } + + params := map[string]param.Value{} + for k, v := range pb { + params[k] = valueFromPb(v) + } + return params +} + +func valueFromPb(pb *servicepb.ParamType) param.Value { + switch pb.Value.(type) { + case *servicepb.ParamType_BoolValue: + return param.Bool(pb.GetBoolValue()) + case *servicepb.ParamType_IntValue: + return param.Int(pb.GetIntValue()) + case *servicepb.ParamType_FloatValue: + return param.Float(pb.GetFloatValue()) + case *servicepb.ParamType_StringValue: + return param.String(pb.GetStringValue()) + case *servicepb.ParamType_ObjectValueJson: + return param.ParseFromString(pb.GetObjectValueJson()) + } + panic(fmt.Sprintf("Unknown param type: %v", pb)) // should never happen +} + +// convert to protobuf + +func experimentsToPb(experiments []*project.Experiment) []*servicepb.Experiment { + ret := make([]*servicepb.Experiment, len(experiments)) + for i, exp := range experiments { + ret[i] = experimentToPb(exp) + } + return ret +} + +func experimentToPb(exp *project.Experiment) *servicepb.Experiment { + return 
&servicepb.Experiment{ + Id: exp.ID, + Created: timestamppb.New(exp.Created), + Params: valueMapToPb(exp.Params), + Host: exp.Host, + User: exp.User, + Config: configToPb(exp.Config), + Command: exp.Command, + Path: exp.Path, + PythonPackages: exp.PythonPackages, + PythonVersion: exp.PythonVersion, + ReplicateVersion: exp.ReplicateVersion, + Checkpoints: checkpointsToPb(exp.Checkpoints), + } +} + +func configToPb(conf *config.Config) *servicepb.Config { + if conf == nil { + return nil + } + return &servicepb.Config{ + Repository: conf.Repository, + Storage: conf.Storage, // deprecated + } +} + +func checkpointsToPb(checkpoints []*project.Checkpoint) []*servicepb.Checkpoint { + if checkpoints == nil { + return nil + } + ret := make([]*servicepb.Checkpoint, len(checkpoints)) + for i, chk := range checkpoints { + ret[i] = checkpointToPb(chk) + } + return ret +} + +func checkpointToPb(chk *project.Checkpoint) *servicepb.Checkpoint { + if chk == nil { + return nil + } + return &servicepb.Checkpoint{ + Id: chk.ID, + Created: timestamppb.New(chk.Created), + Step: chk.Step, + Metrics: valueMapToPb(chk.Metrics), + Path: chk.Path, + PrimaryMetric: primaryMetricToPb(chk.PrimaryMetric), + } +} + +func primaryMetricToPb(pm *project.PrimaryMetric) *servicepb.PrimaryMetric { + var pbPrimaryMetric *servicepb.PrimaryMetric + if pm != nil { + var goal servicepb.PrimaryMetric_Goal + switch pm.Goal { + case project.GoalMaximize: + goal = servicepb.PrimaryMetric_MAXIMIZE + case project.GoalMinimize: + goal = servicepb.PrimaryMetric_MINIMIZE + } + + pbPrimaryMetric = &servicepb.PrimaryMetric{ + Name: pm.Name, + Goal: goal, + } + } + return pbPrimaryMetric +} + +func valueMapToPb(m map[string]param.Value) map[string]*servicepb.ParamType { + if len(m) == 0 { + return nil + } + + pbMap := map[string]*servicepb.ParamType{} + for k, v := range m { + pbMap[k] = valueToPb(v) + } + return pbMap +} + +func valueToPb(v param.Value) *servicepb.ParamType { + switch v.Type() { + case param.TypeBool: 
+ return &servicepb.ParamType{Value: &servicepb.ParamType_BoolValue{BoolValue: v.BoolVal()}} + case param.TypeInt: + return &servicepb.ParamType{Value: &servicepb.ParamType_IntValue{IntValue: v.IntVal()}} + case param.TypeFloat: + return &servicepb.ParamType{Value: &servicepb.ParamType_FloatValue{FloatValue: v.FloatVal()}} + case param.TypeString: + return &servicepb.ParamType{Value: &servicepb.ParamType_StringValue{StringValue: v.StringVal()}} + case param.TypeObject: + return &servicepb.ParamType{Value: &servicepb.ParamType_ObjectValueJson{ObjectValueJson: v.String()}} + case param.TypeNone: + return &servicepb.ParamType{Value: &servicepb.ParamType_ObjectValueJson{ObjectValueJson: "null"}} + } + panic("Uninitialized param.Value") // should never happen +} diff --git a/go/pkg/shared/pb_convert_test.go b/go/pkg/shared/pb_convert_test.go new file mode 100644 index 00000000..38798e03 --- /dev/null +++ b/go/pkg/shared/pb_convert_test.go @@ -0,0 +1,197 @@ +package shared + +import ( + "testing" + "time" + + "github.com/stretchr/testify/require" + "google.golang.org/protobuf/types/known/timestamppb" + + "github.com/replicate/replicate/go/pkg/config" + "github.com/replicate/replicate/go/pkg/param" + "github.com/replicate/replicate/go/pkg/project" + "github.com/replicate/replicate/go/pkg/servicepb" +) + +func fullCheckpointPb() *servicepb.Checkpoint { + return &servicepb.Checkpoint{ + Id: "foo", + Created: timestamppb.New(time.Date(2020, 12, 7, 1, 13, 29, 192682, time.UTC)), + Path: ".", + Step: 123, + Metrics: map[string]*servicepb.ParamType{ + "myint": {Value: &servicepb.ParamType_IntValue{IntValue: 456}}, + "myfloat": {Value: &servicepb.ParamType_FloatValue{FloatValue: 7.89}}, + "mystring": {Value: &servicepb.ParamType_StringValue{StringValue: "value"}}, + "mytrue": {Value: &servicepb.ParamType_BoolValue{BoolValue: true}}, + "myfalse": {Value: &servicepb.ParamType_BoolValue{BoolValue: false}}, + "mylist": {Value: &servicepb.ParamType_ObjectValueJson{ObjectValueJson: 
"[1,2,3]"}}, + "mymap": {Value: &servicepb.ParamType_ObjectValueJson{ObjectValueJson: `{"bar":"baz"}`}}, + }, + PrimaryMetric: &servicepb.PrimaryMetric{ + Name: "myfloat", + Goal: servicepb.PrimaryMetric_MAXIMIZE, + }, + } +} + +func fullCheckpoint() *project.Checkpoint { + return &project.Checkpoint{ + ID: "foo", + Created: time.Date(2020, 12, 7, 1, 13, 29, 192682, time.UTC), + Path: ".", + Step: 123, + Metrics: map[string]param.Value{ + "myint": param.Int(456), + "myfloat": param.Float(7.89), + "mystring": param.String("value"), + "mytrue": param.Bool(true), + "myfalse": param.Bool(false), + "mylist": param.Object([]interface{}{1.0, 2.0, 3.0}), + "mymap": param.Object(map[string]interface{}{"bar": "baz"}), + }, + PrimaryMetric: &project.PrimaryMetric{Name: "myfloat", Goal: "maximize"}, + } +} + +func emptyCheckpointPb() *servicepb.Checkpoint { + return &servicepb.Checkpoint{ + Id: "foo", + Created: timestamppb.New(time.Date(2020, 12, 7, 1, 13, 29, 192682, time.UTC)), + Step: 0, + } +} + +func emptyCheckpoint() *project.Checkpoint { + return &project.Checkpoint{ + ID: "foo", + Created: time.Date(2020, 12, 7, 1, 13, 29, 192682, time.UTC), + Step: 0, + } +} + +func fullExperimentPb() *servicepb.Experiment { + t := time.Date(2020, 12, 7, 1, 13, 29, 192682, time.UTC) + return &servicepb.Experiment{ + Id: "foo", + Created: timestamppb.New(t), + User: "myuser", + Host: "myhost", + Command: "mycmd", + Config: &servicepb.Config{Repository: "myrepo", Storage: ""}, + Path: "mypath", + Params: map[string]*servicepb.ParamType{ + "myint": {Value: &servicepb.ParamType_IntValue{IntValue: 456}}, + "myfloat": {Value: &servicepb.ParamType_FloatValue{FloatValue: 7.89}}, + "mystring": {Value: &servicepb.ParamType_StringValue{StringValue: "value"}}, + "mytrue": {Value: &servicepb.ParamType_BoolValue{BoolValue: true}}, + "myfalse": {Value: &servicepb.ParamType_BoolValue{BoolValue: false}}, + "mylist": {Value: &servicepb.ParamType_ObjectValueJson{ObjectValueJson: "[1,2,3]"}}, + "mymap": 
{Value: &servicepb.ParamType_ObjectValueJson{ObjectValueJson: `{"bar":"baz"}`}}, + }, + PythonPackages: map[string]string{"pkg1": "1.1", "pkg2": "2.2"}, + ReplicateVersion: "1.2.3", + Checkpoints: []*servicepb.Checkpoint{ + { + Id: "c1", + Created: timestamppb.New(t.Add(time.Minute * 1)), + Step: 1, + }, + { + Id: "c2", + Created: timestamppb.New(t.Add(time.Minute * 2)), + Step: 2, + }, + }, + } +} + +func fullExperiment() *project.Experiment { + t := time.Date(2020, 12, 7, 1, 13, 29, 192682, time.UTC) + return &project.Experiment{ + ID: "foo", + Created: t, + User: "myuser", + Host: "myhost", + Command: "mycmd", + Config: &config.Config{Repository: "myrepo", Storage: ""}, + Path: "mypath", + Params: map[string]param.Value{ + "myint": param.Int(456), + "myfloat": param.Float(7.89), + "mystring": param.String("value"), + "mytrue": param.Bool(true), + "myfalse": param.Bool(false), + "mylist": param.Object([]interface{}{1.0, 2.0, 3.0}), + "mymap": param.Object(map[string]interface{}{"bar": "baz"}), + }, + PythonPackages: map[string]string{"pkg1": "1.1", "pkg2": "2.2"}, + ReplicateVersion: "1.2.3", + Checkpoints: []*project.Checkpoint{ + {ID: "c1", Created: t.Add(time.Minute * 1), Step: 1}, + {ID: "c2", Created: t.Add(time.Minute * 2), Step: 2}, + }, + } +} + +func emptyExperimentPb() *servicepb.Experiment { + return &servicepb.Experiment{ + Id: "foo", + Created: timestamppb.New(time.Date(2020, 12, 7, 1, 13, 29, 192682, time.UTC)), + } +} + +func emptyExperiment() *project.Experiment { + return &project.Experiment{ + ID: "foo", + Created: time.Date(2020, 12, 7, 1, 13, 29, 192682, time.UTC), + } +} + +func TestConvertCheckpointFromPb(t *testing.T) { + chkPb := fullCheckpointPb() + expected := fullCheckpoint() + require.Equal(t, expected, checkpointFromPb(chkPb)) +} + +func TestConvertEmptyCheckpointFromPb(t *testing.T) { + chkPb := emptyCheckpointPb() + expected := emptyCheckpoint() + require.Equal(t, expected, checkpointFromPb(chkPb)) +} + +func 
TestConvertExperimentFromPb(t *testing.T) { + expPb := fullExperimentPb() + expected := fullExperiment() + require.Equal(t, expected, experimentFromPb(expPb)) +} + +func TestConvertEmptyExperimentFromPb(t *testing.T) { + expPb := emptyExperimentPb() + expected := emptyExperiment() + require.Equal(t, expected, experimentFromPb(expPb)) +} + +func TestConvertCheckpointToPb(t *testing.T) { + chk := fullCheckpoint() + expected := fullCheckpointPb() + require.Equal(t, expected, checkpointToPb(chk)) +} + +func TestConvertEmptyCheckpointToPb(t *testing.T) { + chk := emptyCheckpoint() + expected := emptyCheckpointPb() + require.Equal(t, expected, checkpointToPb(chk)) +} + +func TestConvertExperimentToPb(t *testing.T) { + exp := fullExperiment() + expected := fullExperimentPb() + require.Equal(t, expected, experimentToPb(exp)) +} + +func TestConvertEmptyExperimentToPb(t *testing.T) { + exp := emptyExperiment() + expected := emptyExperimentPb() + require.Equal(t, expected, experimentToPb(exp)) +} diff --git a/go/pkg/shared/repository.go b/go/pkg/shared/repository.go deleted file mode 100644 index 7662c2b8..00000000 --- a/go/pkg/shared/repository.go +++ /dev/null @@ -1,225 +0,0 @@ -package shared - -import ( - "fmt" - - "github.com/replicate/replicate/go/pkg/repository" -) - -type GetArgs struct { - Bucket, Root, Path string -} - -type GetReturn struct { - Data []byte -} - -type PutArgs struct { - Bucket, Root, Path string - Data []byte -} - -type PutPathArgs struct { - Bucket, Root, Src, Dest string -} - -type PutPathTarArgs struct { - Bucket, Root, LocalPath, TarPath, IncludePath string -} - -type ListArgs struct { - Bucket, Root, Path string -} -type ListReturn struct { - Paths []string -} - -type DeleteArgs struct { - Bucket, Root, Path string -} - -type GetPathTarArgs struct { - Bucket, Root, TarPath, LocalPath string -} - -type GCSRepository struct{} - -func (GCSRepository) Get(args GetArgs, ret *GetReturn) error { - st, err := repository.NewGCSRepository(args.Bucket, 
args.Root) - if err != nil { - return err - } - ret.Data, err = st.Get(args.Path) - // HACK: net/rpc/jsonrpc doesn't let us include error codes, so prefix with - // predictable error name - if _, ok := err.(*repository.DoesNotExistError); ok { - return fmt.Errorf("DoesNotExistError:: %w", err) - } - return err -} - -func (GCSRepository) Put(args PutArgs, _ *int) error { - st, err := repository.NewGCSRepository(args.Bucket, args.Root) - if err != nil { - return err - } - return st.Put(args.Path, args.Data) -} - -func (GCSRepository) List(args ListArgs, ret *ListReturn) error { - st, err := repository.NewGCSRepository(args.Bucket, args.Root) - if err != nil { - return err - } - ret.Paths, err = st.List(args.Path) - return err -} - -func (GCSRepository) PutPath(args PutPathArgs, _ *int) error { - st, err := repository.NewGCSRepository(args.Bucket, args.Root) - if err != nil { - return err - } - return st.PutPath(args.Src, args.Dest) -} - -func (GCSRepository) PutPathTar(args PutPathTarArgs, _ *int) error { - st, err := repository.NewGCSRepository(args.Bucket, args.Root) - if err != nil { - return err - } - return st.PutPathTar(args.LocalPath, args.TarPath, args.IncludePath) -} - -func (GCSRepository) Delete(args DeleteArgs, _ *int) error { - st, err := repository.NewGCSRepository(args.Bucket, args.Root) - if err != nil { - return err - } - return st.Delete(args.Path) -} - -func (GCSRepository) GetPathTar(args GetPathTarArgs, _ *int) error { - st, err := repository.NewGCSRepository(args.Bucket, args.Root) - if err != nil { - return err - } - err = st.GetPathTar(args.TarPath, args.LocalPath) - // HACK: net/rpc/jsonrpc doesn't let us include error codes, so prefix with - // predictable error name - if _, ok := err.(*repository.DoesNotExistError); ok { - return fmt.Errorf("DoesNotExistError:: %w", err) - } - return err -} - -type S3Repository struct{} - -func (S3Repository) Get(args GetArgs, ret *GetReturn) error { - st, err := repository.NewS3Repository(args.Bucket, 
args.Root) - if err != nil { - return err - } - ret.Data, err = st.Get(args.Path) - // HACK: net/rpc/jsonrpc doesn't let us include error codes, so prefix with - // predictable error name - if _, ok := err.(*repository.DoesNotExistError); ok { - return fmt.Errorf("DoesNotExistError:: %w", err) - } - return err -} - -func (S3Repository) Put(args PutArgs, _ *int) error { - st, err := repository.NewS3Repository(args.Bucket, args.Root) - if err != nil { - return err - } - return st.Put(args.Path, args.Data) -} - -func (S3Repository) List(args ListArgs, ret *ListReturn) error { - st, err := repository.NewS3Repository(args.Bucket, args.Root) - if err != nil { - return err - } - ret.Paths, err = st.List(args.Path) - return err -} - -func (S3Repository) PutPath(args PutPathArgs, _ *int) error { - st, err := repository.NewS3Repository(args.Bucket, args.Root) - if err != nil { - return err - } - return st.PutPath(args.Src, args.Dest) -} - -func (S3Repository) PutPathTar(args PutPathTarArgs, _ *int) error { - st, err := repository.NewS3Repository(args.Bucket, args.Root) - if err != nil { - return err - } - return st.PutPathTar(args.LocalPath, args.TarPath, args.IncludePath) -} - -func (S3Repository) Delete(args DeleteArgs, _ *int) error { - st, err := repository.NewS3Repository(args.Bucket, args.Root) - if err != nil { - return err - } - return st.Delete(args.Path) -} - -func (S3Repository) GetPathTar(args GetPathTarArgs, _ *int) error { - st, err := repository.NewS3Repository(args.Bucket, args.Root) - if err != nil { - return err - } - err = st.GetPathTar(args.TarPath, args.LocalPath) - // HACK: net/rpc/jsonrpc doesn't let us include error codes, so prefix with - // predictable error name - if _, ok := err.(*repository.DoesNotExistError); ok { - return fmt.Errorf("DoesNotExistError:: %w", err) - } - return err -} - -type DiskRepository struct{} - -func (DiskRepository) PutPath(args PutPathArgs, _ *int) error { - st, err := repository.NewDiskRepository(args.Root) - if err != 
nil { - return err - } - return st.PutPath(args.Src, args.Dest) -} - -func (DiskRepository) PutPathTar(args PutPathTarArgs, _ *int) error { - st, err := repository.NewDiskRepository(args.Root) - if err != nil { - return err - } - return st.PutPathTar(args.LocalPath, args.TarPath, args.IncludePath) -} - -func (DiskRepository) Delete(args DeleteArgs, _ *int) error { - st, err := repository.NewDiskRepository(args.Root) - if err != nil { - return err - } - return st.Delete(args.Path) -} - -func (DiskRepository) GetPathTar(args GetPathTarArgs, _ *int) error { - st, err := repository.NewDiskRepository(args.Root) - if err != nil { - return err - } - err = st.GetPathTar(args.TarPath, args.LocalPath) - // HACK: net/rpc/jsonrpc doesn't let us include error codes, so prefix with - // predictable error name - if _, ok := err.(*repository.DoesNotExistError); ok { - return fmt.Errorf("DoesNotExistError:: %w", err) - } - return err -} diff --git a/go/pkg/shared/serve.go b/go/pkg/shared/serve.go index 609f961a..fb9844cb 100644 --- a/go/pkg/shared/serve.go +++ b/go/pkg/shared/serve.go @@ -1,36 +1,282 @@ package shared +// TODO(andreas): document this for R API etc + import ( - "io" - "net/rpc" - "net/rpc/jsonrpc" + "context" + "fmt" + "net" "os" + "os/signal" + "syscall" + + "google.golang.org/genproto/googleapis/rpc/errdetails" + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" + + "github.com/replicate/replicate/go/pkg/console" + "github.com/replicate/replicate/go/pkg/errors" + "github.com/replicate/replicate/go/pkg/project" + "github.com/replicate/replicate/go/pkg/servicepb" ) -// rwCloser merges a ReadCloser and a WriteCloser into a ReadWriteCloser. 
-type rwCloser struct { - io.ReadCloser - io.WriteCloser +type projectGetter func() (proj *project.Project, err error) + +type server struct { + servicepb.UnimplementedDaemonServer + + workChan chan func() error + projectGetter projectGetter + project *project.Project + heartbeatsByExperimentID map[string]*HeartbeatProcess +} + +func (s *server) CreateExperiment(ctx context.Context, req *servicepb.CreateExperimentRequest) (*servicepb.CreateExperimentReply, error) { + pbReqExp := req.GetExperiment() + args := project.CreateExperimentArgs{ + Path: pbReqExp.GetPath(), + Command: pbReqExp.GetCommand(), + Params: valueMapFromPb(pbReqExp.GetParams()), + PythonPackages: pbReqExp.GetPythonPackages(), + } + proj, err := s.getProject() + if err != nil { + return nil, handleError(err) + } + exp, err := proj.CreateExperiment(args, true, s.workChan, req.Quiet) + if err != nil { + return nil, handleError(err) + } + if !req.DisableHeartbeat { + s.heartbeatsByExperimentID[exp.ID] = StartHeartbeat(s.project, exp.ID) + } + + pbRetExp := experimentToPb(exp) + return &servicepb.CreateExperimentReply{Experiment: pbRetExp}, nil +} + +func (s *server) CreateCheckpoint(ctx context.Context, req *servicepb.CreateCheckpointRequest) (*servicepb.CreateCheckpointReply, error) { + pbReqChk := req.GetCheckpoint() + args := project.CreateCheckpointArgs{ + Path: pbReqChk.GetPath(), + Metrics: valueMapFromPb(pbReqChk.GetMetrics()), + PrimaryMetric: primaryMetricFromPb(pbReqChk.PrimaryMetric), + Step: pbReqChk.GetStep(), + } + proj, err := s.getProject() + if err != nil { + return nil, handleError(err) + } + chk, err := proj.CreateCheckpoint(args, true, s.workChan, req.Quiet) + if err != nil { + return nil, handleError(err) + } + + pbRetChk := checkpointToPb(chk) + return &servicepb.CreateCheckpointReply{Checkpoint: pbRetChk}, nil } -func (rw rwCloser) Close() error { - err := rw.ReadCloser.Close() - if err := rw.WriteCloser.Close(); err != nil { - return err +func (s *server) SaveExperiment(ctx 
context.Context, req *servicepb.SaveExperimentRequest) (*servicepb.SaveExperimentReply, error) { + expPb := req.GetExperiment() + exp := experimentFromPb(expPb) + proj, err := s.getProject() + if err != nil { + return nil, handleError(err) + } + exp, err = proj.SaveExperiment(exp, req.Quiet) + if err != nil { + return nil, handleError(err) } - return err + return &servicepb.SaveExperimentReply{Experiment: experimentToPb(exp)}, nil } -func Serve() { - s := rpc.NewServer() - if err := s.Register(S3Repository{}); err != nil { - panic(err) +func (s *server) StopExperiment(ctx context.Context, req *servicepb.StopExperimentRequest) (*servicepb.StopExperimentReply, error) { + if _, ok := s.heartbeatsByExperimentID[req.ExperimentID]; ok { + s.heartbeatsByExperimentID[req.ExperimentID].Kill() + delete(s.heartbeatsByExperimentID, req.ExperimentID) + } + proj, err := s.getProject() + if err != nil { + return nil, handleError(err) + } + if err := proj.StopExperiment(req.ExperimentID); err != nil { + return nil, handleError(err) + } + return &servicepb.StopExperimentReply{}, nil +} + +func (s *server) GetExperiment(ctx context.Context, req *servicepb.GetExperimentRequest) (*servicepb.GetExperimentReply, error) { + proj, err := s.getProject() + if err != nil { + return nil, handleError(err) + } + exp, err := proj.ExperimentFromPrefix(req.ExperimentIDPrefix) + if err != nil { + return nil, handleError(err) + } + expPb := experimentToPb(exp) + return &servicepb.GetExperimentReply{Experiment: expPb}, nil +} + +func (s *server) ListExperiments(ctx context.Context, req *servicepb.ListExperimentsRequest) (*servicepb.ListExperimentsReply, error) { + proj, err := s.getProject() + if err != nil { + return nil, handleError(err) + } + experiments, err := proj.Experiments() + if err != nil { + return nil, handleError(err) + } + experimentsPb := experimentsToPb(experiments) + return &servicepb.ListExperimentsReply{Experiments: experimentsPb}, nil +} + +func (s *server) DeleteExperiment(ctx 
context.Context, req *servicepb.DeleteExperimentRequest) (*servicepb.DeleteExperimentReply, error) { + proj, err := s.getProject() + if err != nil { + return nil, handleError(err) + } + exp, err := proj.ExperimentByID(req.ExperimentID) + if err != nil { + return nil, handleError(err) + } + if err := s.project.DeleteExperiment(exp); err != nil { + return nil, handleError(err) } - if err := s.Register(GCSRepository{}); err != nil { - panic(err) + // This is slow, see https://github.com/replicate/replicate/issues/333 + for _, checkpoint := range exp.Checkpoints { + if err := s.project.DeleteCheckpoint(checkpoint); err != nil { + return nil, handleError(err) + } } - if err := s.Register(DiskRepository{}); err != nil { - panic(err) + + return &servicepb.DeleteExperimentReply{}, nil +} + +func (s *server) CheckoutCheckpoint(ctx context.Context, req *servicepb.CheckoutCheckpointRequest) (*servicepb.CheckoutCheckpointReply, error) { + proj, err := s.getProject() + if err != nil { + return nil, handleError(err) + } + chk, exp, err := proj.CheckpointFromPrefix(req.CheckpointIDPrefix) + if err != nil { + return nil, handleError(err) + } + + err = s.project.CheckoutCheckpoint(chk, exp, req.OutputDirectory, req.Quiet) + if err != nil { + return nil, handleError(err) + } + return &servicepb.CheckoutCheckpointReply{}, nil +} + +func (s *server) GetExperimentStatus(ctx context.Context, req *servicepb.GetExperimentStatusRequest) (*servicepb.GetExperimentStatusReply, error) { + proj, err := s.getProject() + if err != nil { + return nil, handleError(err) + } + isRunning, err := proj.ExperimentIsRunning(req.ExperimentID) + if err != nil { + return nil, handleError(err) + } + var status servicepb.GetExperimentStatusReply_Status + if isRunning { + status = servicepb.GetExperimentStatusReply_RUNNING + } else { + status = servicepb.GetExperimentStatusReply_STOPPED + } + return &servicepb.GetExperimentStatusReply{Status: status}, nil +} + +func (s *server) getProject() (*project.Project, 
error) { + // we get the project lazily so that we can return a protobuf exception to the client + // as part of a request flow + + if s.project != nil { + return s.project, nil + } + + proj, err := s.projectGetter() + if err != nil { + return nil, err + } + s.project = proj + return proj, nil +} + +func Serve(projGetter projectGetter, socketPath string) error { + console.Debug("Starting daemon") + + listener, err := net.Listen("unix", socketPath) + if err != nil { + return fmt.Errorf("Failed to open UNIX socket on %s: %w", socketPath, err) + } + + grpcServer := grpc.NewServer() + s := &server{ + // block if there already are two items on the queue, in case uploading is a bottleneck + // TODO(andreas): warn the user if the queue is full, so they know that they should + // upload at a lesser interval + workChan: make(chan func() error, 2), + projectGetter: projGetter, + heartbeatsByExperimentID: make(map[string]*HeartbeatProcess), + } + servicepb.RegisterDaemonServer(grpcServer, s) + + // when the process exits, make sure any pending + // uploads are completed + completedChan := make(chan struct{}) + + sigc := make(chan os.Signal, 1) + signal.Notify(sigc, + syscall.SIGHUP, + syscall.SIGINT, + syscall.SIGTERM, + syscall.SIGQUIT) + + go func() { + <-sigc + console.Debug("Exiting...") + s.workChan <- nil // nil is an exit sentinel + for _, hb := range s.heartbeatsByExperimentID { + hb.Kill() + } + <-completedChan + grpcServer.GracefulStop() + }() + + go func() { + for { + work := <-s.workChan + if work == nil { + completedChan <- struct{}{} + return + } + if err := work(); err != nil { + console.Error("%v", err) + // TODO(andreas): poll status endpoint, put errors in chan of messages to return. 
also include progress in these messages + } + } + }() + + if err := grpcServer.Serve(listener); err != nil { + return fmt.Errorf("Failed to start server: %w", err) + } + + return nil +} + +func handleError(err error) error { + reason := errors.Code(err) + if reason != "" { + st := status.New(codes.Internal, err.Error()) + details := &errdetails.ErrorInfo{Reason: reason} + st, err := st.WithDetails(details) + if err != nil { + return err + } + return st.Err() } - s.ServeCodec(jsonrpc.NewServerCodec(rwCloser{os.Stdin, os.Stdout})) + return status.Error(codes.Unknown, err.Error()) } diff --git a/go/pkg/testutil/testutil.go b/go/pkg/testutil/testutil.go index 444509a9..1f2fbaf3 100644 --- a/go/pkg/testutil/testutil.go +++ b/go/pkg/testutil/testutil.go @@ -9,8 +9,9 @@ import ( "github.com/stretchr/testify/require" ) -func IP(i int) *int { - return &i +func IP(i int) *int64 { + i64 := int64(i) + return &i64 } func FP(f float64) *float64 { diff --git a/proto/Makefile b/proto/Makefile new file mode 100644 index 00000000..2e0ee376 --- /dev/null +++ b/proto/Makefile @@ -0,0 +1,34 @@ +# TODO: verify protoc is installed (brew install protobuf; go install google.golang.org/protobuf/cmd/protoc-gen-go) +# go get -u github.com/golang/protobuf/{proto,protoc-gen-go} google.golang.org/grpc +# make sure ~/go/bin is in $PATH, but make sure ~ is expanded! ("Strangely protoc can't expand ~" https://stackoverflow.com/a/57731186/135797) +# pip install grpcio +# +# TODO: make the process of installing the proto tools and verifying installation automatic. 
+ +GO_OUTPUT_DIR=../go/pkg/servicepb +PYTHON_OUTPUT_DIR=../python/replicate/servicepb +PROTO_NAME=replicate +SERVICE_PROTO=$(PROTO_NAME).proto + +.PHONY: build +build: + mkdir -p $(GO_OUTPUT_DIR) + mkdir -p $(PYTHON_OUTPUT_DIR) + protoc \ + --go_out=$(GO_OUTPUT_DIR) \ + --go_opt=paths=source_relative \ + --go-grpc_out=$(GO_OUTPUT_DIR) \ + --go-grpc_opt=paths=source_relative \ + --experimental_allow_proto3_optional \ + --mypy_out=$(PYTHON_OUTPUT_DIR) \ + $(SERVICE_PROTO) + python -m grpc_tools.protoc \ + --python_out=$(PYTHON_OUTPUT_DIR) \ + --grpc_python_out=$(PYTHON_OUTPUT_DIR) \ + --proto_path=. \ + --experimental_allow_proto3_optional \ + $(SERVICE_PROTO) + # need to post-process python-generated protobuf to make the import relative + # TODO(andreas): remove this when https://github.com/protocolbuffers/protobuf/pull/7470 is merged + sed -E -i '' 's/^import $(PROTO_NAME)_pb2 as $(PROTO_NAME)__pb2$$/from . import $(PROTO_NAME)_pb2 as $(PROTO_NAME)__pb2/' $(PYTHON_OUTPUT_DIR)/$(PROTO_NAME)_pb2_grpc.py + touch $(PYTHON_OUTPUT_DIR)/__init__.py diff --git a/proto/replicate.proto b/proto/replicate.proto new file mode 100644 index 00000000..dd8edf6e --- /dev/null +++ b/proto/replicate.proto @@ -0,0 +1,149 @@ +syntax = "proto3"; + +// TODO: docstrings + +option go_package = "github.com/replicate/replicate/go/pkg/servicepb"; + +package service; + +import "google/protobuf/timestamp.proto"; + +service Daemon { + rpc CreateExperiment (CreateExperimentRequest) returns (CreateExperimentReply) {} + rpc CreateCheckpoint (CreateCheckpointRequest) returns (CreateCheckpointReply) {} + rpc SaveExperiment (SaveExperimentRequest) returns (SaveExperimentReply) {} + rpc StopExperiment (StopExperimentRequest) returns (StopExperimentReply) {} + rpc GetExperiment (GetExperimentRequest) returns (GetExperimentReply) {} + rpc ListExperiments (ListExperimentsRequest) returns (ListExperimentsReply) {} + rpc DeleteExperiment (DeleteExperimentRequest) returns (DeleteExperimentReply) {} + rpc 
CheckoutCheckpoint (CheckoutCheckpointRequest) returns (CheckoutCheckpointReply) {} + rpc GetExperimentStatus (GetExperimentStatusRequest) returns (GetExperimentStatusReply) {} +} + +message CreateExperimentRequest { + Experiment experiment = 1; + bool disableHeartbeat = 2; + bool quiet = 3; +} + +message CreateExperimentReply { + Experiment experiment = 1; +} + +message CreateCheckpointRequest { + Checkpoint checkpoint = 1; + bool quiet = 2; +} + +message CreateCheckpointReply { + Checkpoint checkpoint = 1; +} + +message SaveExperimentRequest { + Experiment experiment = 1; + bool quiet = 2; +} + +message SaveExperimentReply { + Experiment experiment = 1; +} + +message StopExperimentRequest { + string experimentID = 1; +} + +message StopExperimentReply { +} + +message GetExperimentRequest { + string experimentIDPrefix = 1; +} + +message GetExperimentReply { + Experiment experiment = 1; +} + +message ListExperimentsRequest { +} + +message ListExperimentsReply { + repeated Experiment experiments = 1; +} + +message DeleteExperimentRequest { + string experimentID = 1; +} + +message DeleteExperimentReply { +} + +message CheckoutCheckpointRequest { + string checkpointIDPrefix = 1; + string outputDirectory = 2; + bool quiet = 3; +} + +message CheckoutCheckpointReply { +} + +message GetExperimentStatusRequest { + string experimentID = 1; +} + +message GetExperimentStatusReply { + enum Status { + RUNNING = 0; + STOPPED = 1; + }; + Status status = 1; +} + +message Experiment { + string id = 1; + google.protobuf.Timestamp created = 2; + map<string, ParamType> params = 3; + string host = 4; + string user = 5; + Config config = 6; + string command = 7; + string path = 8; + map<string, string> pythonPackages = 9; + string pythonVersion = 10; + repeated Checkpoint checkpoints = 11; + string replicateVersion = 12; +} + +message Config { + string repository = 1; + + // for backwards compatibility + string storage = 2; +} + +message Checkpoint { + string id = 1; + google.protobuf.Timestamp created = 2; + map<string, ParamType> metrics
= 3; + int64 step = 4; + string path = 5; + PrimaryMetric primaryMetric = 6; +} + +message PrimaryMetric { + enum Goal { + MAXIMIZE = 0; + MINIMIZE = 1; + } + string name = 1; + Goal goal = 2; +} + +message ParamType { + oneof value { + bool boolValue = 1; + int64 intValue = 2; + double floatValue = 3; + string stringValue = 4; + string objectValueJson = 5; + } +} diff --git a/python/.mypy.ini b/python/.mypy.ini index 98f8134a..7d3410dc 100644 --- a/python/.mypy.ini +++ b/python/.mypy.ini @@ -5,3 +5,6 @@ ignore_errors = True [mypy-replicate._vendor.*] ignore_errors = True + +[mypy-replicate.servicepb.*] +ignore_errors = True diff --git a/python/pyproject.toml b/python/pyproject.toml index 54e77e86..58f9d9d5 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -1,5 +1,5 @@ [tool.black] -exclude = '(\.eggs|\.git|\.hg|\.mypy|_cache|\.nox|\.tox|\.venv|\.svn|_build|buck-out|build|dist|_vendor)' +exclude = '(\.eggs|\.git|\.hg|\.mypy|_cache|\.nox|\.tox|\.venv|\.svn|_build|buck-out|build|dist|_vendor|servicepb)' [tool.vendoring] destination = "replicate/_vendor/" diff --git a/python/replicate/checkpoint.py b/python/replicate/checkpoint.py index dd590841..91bf5314 100644 --- a/python/replicate/checkpoint.py +++ b/python/replicate/checkpoint.py @@ -18,9 +18,7 @@ from ._vendor.typing_extensions import TypedDict from . import console -from .exceptions import DoesNotExistError from .json import CustomJSONEncoder -from .hash import random_hash from .metadata import rfc3339_datetime, parse_rfc3339 from .validate import check_path @@ -118,36 +116,19 @@ def validate(self) -> List[str]: return errors + # TODO(andreas): this is only checking out checkpoints, which + # is different from the CLI where both checkpoints and + # experiments can be checked out with `replicate checkout`. + # Perhaps we should support experiment.checkout() as well? def checkout(self, output_directory: str, quiet: bool = False): """ Copy files from this checkpoint to the output directory. 
""" - os.makedirs(output_directory, exist_ok=True) - - assert self._experiment is not None - repository = self._experiment._project._get_repository() - no_experiment_files = False - no_checkpoint_files = False - try: - repository.get_path_tar(self._repository_tar_path(), output_directory) - except DoesNotExistError: - no_experiment_files = True - try: - repository.get_path_tar( - self._experiment._repository_tar_path(), output_directory - ) - except DoesNotExistError: - no_checkpoint_files = True - if no_experiment_files and no_checkpoint_files: - raise DoesNotExistError( - f"Could not find any files in checkpoint {self.short_id()} or its experiment {self._experiment.short_id()}. Did you pass the 'path' argument to init() or checkpoint()?" - ) - if not quiet: - console.info( - "Copied the files from checkpoint {} to {}".format( - self.short_id(), output_directory - ) - ) + assert self._experiment + + self._experiment._project._daemon().checkout_checkpoint( + self.id, output_directory, quiet=quiet + ) def open(self, path: str) -> BinaryIO: """ @@ -161,9 +142,6 @@ def open(self, path: str) -> BinaryIO: out_f = io.BytesIO(f.read()) return out_f - def _repository_tar_path(self) -> str: - return "checkpoints/{}.tar.gz".format(self.id) - def _repr_html_(self) -> str: out = '

Checkpoint(id="{}")

'.format( self.id diff --git a/python/replicate/config.py b/python/replicate/config.py deleted file mode 100644 index 4ab771f1..00000000 --- a/python/replicate/config.py +++ /dev/null @@ -1,105 +0,0 @@ -import os -from typing import List, Dict, Any - -from ._vendor import yaml - -from . import console, constants -from .exceptions import ConfigNotFoundError - - -class ConfigValidationError(Exception): - def __str__(self): - return ( - super().__str__() - + "\n\nSee the documentation for more details: " - + constants.YAML_REFERENCE_DOCS_URL - ) - - -def load_config(project_dir: str) -> Dict[str, Any]: - """ - Loads config from directory - """ - data_found = False - for filename in ["replicate.yaml", "replicate.yml"]: - if os.path.isfile(os.path.join(project_dir, filename)): - data_found = True - with open(os.path.join(project_dir, filename)) as fh: - data = yaml.safe_load(fh) - - if data_found: - break - - if not data_found: - raise ConfigNotFoundError( - "replicate.yaml was not found in {}".format(project_dir) - ) - # Empty file - if data is None: - data = {} - - # if replicate is running inside docker and repository is disk, - # REPLICATE_REPOSITORY is mounted to the value of repository: in - # replicate.yaml - if "REPLICATE_REPOSITORY" in os.environ: - data["repository"] = os.environ["REPLICATE_REPOSITORY"] - - return validate_and_set_defaults(data, project_dir) - - -# This should be rigorously validated, see https://github.com/replicate/replicate/issues/330 -VALID_KEYS = [ - "repository", - "storage", # deprecated -] -REQUIRED_KEYS: List[str] = ["repository"] - - -def validate_and_set_defaults(data: Dict[str, Any], project_dir: str) -> Dict[str, Any]: - if data.get("storage"): - if data.get("repository"): - raise ConfigValidationError( - "Both 'storage' (deprecated) and 'repository' are defined in replicate.yaml, please only use 'repository'" - ) - - console.warn( - "'storage' is deprecated in replicate.yaml, please use 'repository'" - ) - data["repository"] = 
data["storage"] - del data["storage"] - - defaults = get_default_config() - - for key, value in defaults.items(): - if key not in data: - data[key] = value - - for key, value in data.items(): - if key not in VALID_KEYS: - raise ConfigValidationError( - "The option '{}' is in replicate.yaml, but it is not supported.".format( - key - ) - ) - - if key == "repository": - if not isinstance(value, str): - raise ConfigValidationError( - "The option 'repository' in replicate.yaml needs to be a string." - ) - - # check for required keys last since repository is set from - # storage for backwards compatibility - for key in REQUIRED_KEYS: - if key not in data: - raise ConfigValidationError( - "The option '{}' is required in replicate.yaml, but you have not set it.".format( - key - ) - ) - - return data - - -def get_default_config() -> Dict[str, Any]: - return {} diff --git a/python/replicate/constants.py b/python/replicate/constants.py index 5c0a1ddc..d8be8813 100644 --- a/python/replicate/constants.py +++ b/python/replicate/constants.py @@ -3,6 +3,3 @@ PYTHON_REFERENCE_DOCS_URL = DOCS_URL + "/reference/python" YAML_REFERENCE_DOCS_URL = DOCS_URL + "/reference/yaml" REPOSITORY_VERSION = 1 -HEARTBEAT_MISS_TOLERANCE = 3 -EXPERIMENT_STATUS_RUNNING = "running" -EXPERIMENT_STATUS_STOPPED = "stopped" diff --git a/python/replicate/daemon.py b/python/replicate/daemon.py new file mode 100644 index 00000000..34aeb928 --- /dev/null +++ b/python/replicate/daemon.py @@ -0,0 +1,256 @@ +# TODO: docstring +# TODO: rename to shared? + +import functools +import tempfile +import os +from typing import Optional, Dict, Any, List +import subprocess +import atexit +import sys +import threading + +import grpc # type: ignore +from google.rpc import status_pb2, error_details_pb2 # type: ignore + +from .servicepb.replicate_pb2_grpc import DaemonStub +from .servicepb import replicate_pb2 as pb +from . 
import pb_convert +from .experiment import Experiment +from .checkpoint import Checkpoint, PrimaryMetric +from . import exceptions +from . import console + +# TODO(andreas): rename to replicate-daemon +DAEMON_BINARY = os.path.join(os.path.dirname(__file__), "bin/replicate-shared") + + +def handle_error(f): + @functools.wraps(f) + def wrapped(*args, **kwargs): + try: + return f(*args, **kwargs) + except grpc.RpcError as e: + code, name = e.code().value + details = e.details() + if name == "internal": + status_code = get_status_code(e, details) + if status_code: + raise handle_exception(status_code, details) + raise Exception(details) + + return wrapped + + +def handle_exception(code, details): + if code == "DOES_NOT_EXIST": + return exceptions.DoesNotExist(details) + if code == "READ_ERROR": + return exceptions.ReadError(details) + if code == "WRITE_ERROR": + return exceptions.WriteError(details) + if code == "REPOSITORY_CONFIGURATION_ERROR": + return exceptions.RepositoryConfigurationError(details) + if code == "INCOMPATIBLE_REPOSITORY_VERSION": + return exceptions.IncompatibleRepositoryVersion(details) + if code == "CORRUPTED_REPOSITORY_SPEC": + return exceptions.CorruptedRepositorySpec(details) + if code == "CONFIG_NOT_FOUND": + return exceptions.ConfigNotFound(details) + + +def get_status_code(e, details): + metadata = e.trailing_metadata() + status_md = [x for x in metadata if is_status_detail(x)] + if status_md: + for md in status_md: + st = status_pb2.Status() + st.MergeFromString(md.value) + if st.details: + val = error_details_pb2.ErrorInfo() + st.details[0].Unpack(val) + return val.reason + return None + + +def is_status_detail(x): + return hasattr(x, "key") and x.key == "grpc-status-details-bin" + + +class Daemon: + def __init__(self, project, socket_path=None): + self.project = project + + if socket_path is None: + # create a new temporary file just to get a free name. + # the Go GRPC server will create the file. 
+ f = tempfile.NamedTemporaryFile( + prefix="replicate-daemon-", suffix=".sock", delete=False + ) + self.socket_path = f.name + f.close() + else: + self.socket_path = socket_path + + # the Go GRPC server will fail to start if the socket file + # already exists. + os.unlink(self.socket_path) + + cmd = [DAEMON_BINARY] + if self.project.repository: + cmd += ["-R", self.project.repository] + if self.project.directory: + cmd += ["-D", self.project.directory] + cmd.append(self.socket_path) + self.process = subprocess.Popen( + cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, bufsize=1 + ) + + # need to wrap stdout and stderr for this to work in jupyter + # notebooks. jupyter redefines sys.std{out,err} as custom + # writers that eventually write the output to the notebook. + self.stdout_thread = start_wrapped_pipe(self.process.stdout, sys.stdout) + self.stderr_thread = start_wrapped_pipe(self.process.stderr, sys.stderr) + + atexit.register(self.cleanup) + self.channel = grpc.insecure_channel("unix://" + self.socket_path) + self.stub = DaemonStub(self.channel) + + TIMEOUT_SEC = 15 + grpc.channel_ready_future(self.channel).result(timeout=TIMEOUT_SEC) + + # TODO(andreas): catch daemon dying (bubble up an exception so we can fail on experiment.init()) + + def cleanup(self): + if self.process.poll() is None: # check if process is still running: + # TODO(andreas): if the process takes more than a couple + # of seconds to quit, write another message indicating + # that it's still cleaning things up. + console.info("Replicate is quitting...") + + # the sigterm handler in the daemon process waits for any in-progress uploads etc. to finish. 
+ # the sigterm handler also deletes the socket file + self.process.terminate() + self.process.wait() + + # need to join these threads to avoid "could not acquire lock" error + self.stdout_thread.join() + self.stderr_thread.join() + self.channel.close() + + @handle_error + def create_experiment( + self, + path: Optional[str], + params: Optional[Dict[str, Any]], + command: Optional[str], + python_packages: Dict[str, str], + python_version: str, + quiet: bool, + disable_hearbeat: bool, + ) -> Experiment: + pb_experiment = pb.Experiment( + params=pb_convert.value_map_to_pb(params), + path=path, + command=command, + pythonPackages=python_packages, + pythonVersion=python_version, + ) + ret = self.stub.CreateExperiment( + pb.CreateExperimentRequest( + experiment=pb_experiment, + disableHeartbeat=disable_hearbeat, + quiet=quiet, + ), + ) + return pb_convert.experiment_from_pb(self.project, ret.experiment) + + @handle_error + def create_checkpoint( + self, + experiment: Experiment, + path: Optional[str], + step: Optional[int], + metrics: Optional[Dict[str, Any]], + primary_metric: Optional[PrimaryMetric], + quiet: bool, + ) -> Checkpoint: + pb_primary_metric = pb_convert.primary_metric_to_pb(primary_metric) + pb_checkpoint = pb.Checkpoint( + metrics=pb_convert.value_map_to_pb(metrics), + path=path, + primaryMetric=pb_primary_metric, + step=step, + ) + ret = self.stub.CreateCheckpoint( + pb.CreateCheckpointRequest(checkpoint=pb_checkpoint, quiet=quiet) + ) + return pb_convert.checkpoint_from_pb(experiment, ret.checkpoint) + + @handle_error + def save_experiment( + self, experiment: Experiment, quiet: bool, + ): + pb_experiment = pb_convert.experiment_to_pb(experiment) + return self.stub.SaveExperiment( + pb.SaveExperimentRequest(experiment=pb_experiment, quiet=quiet) + ) + + @handle_error + def stop_experiment(self, experiment_id: str): + self.stub.StopExperiment(pb.StopExperimentRequest(experimentID=experiment_id)) + + @handle_error + def get_experiment(self, 
experiment_id_prefix: str) -> Experiment: + ret = self.stub.GetExperiment( + pb.GetExperimentRequest(experimentIDPrefix=experiment_id_prefix), + ) + return pb_convert.experiment_from_pb(self.project, ret.experiment) + + @handle_error + def list_experiments(self) -> List[Experiment]: + ret = self.stub.ListExperiments(pb.ListExperimentsRequest()) + return pb_convert.experiments_from_pb(self.project, ret.experiments) + + @handle_error + def delete_experiment(self, experiment_id: str): + self.stub.DeleteExperiment( + pb.DeleteExperimentRequest(experimentID=experiment_id) + ) + + @handle_error + def checkout_checkpoint( + self, checkpoint_id_prefix: str, output_directory: str, quiet: bool + ): + self.stub.CheckoutCheckpoint( + pb.CheckoutCheckpointRequest( + checkpointIDPrefix=checkpoint_id_prefix, + outputDirectory=output_directory, + quiet=quiet, + ), + ) + + @handle_error + def experiment_is_running(self, experiment_id: str) -> str: + ret = self.stub.GetExperimentStatus( + pb.GetExperimentStatusRequest(experimentID=experiment_id) + ) + return ret.status == pb.GetExperimentStatusReply.Status.RUNNING + + +def start_wrapped_pipe(pipe, writer): + def wrap_pipe(pipe, writer): + with pipe: + for line in iter(pipe.readline, b""): + writer.write(line) + writer.flush() + + # if writer is normal sys.std{out,err}, it can't + # write bytes directly. + # see https://stackoverflow.com/a/908440/135797 + if hasattr(writer, "buffer"): + writer = writer.buffer + + thread = threading.Thread(target=wrap_pipe, args=[pipe, writer], daemon=True) + thread.start() + return thread diff --git a/python/replicate/exceptions.py b/python/replicate/exceptions.py index d1abf722..160b1ded 100644 --- a/python/replicate/exceptions.py +++ b/python/replicate/exceptions.py @@ -1,55 +1,29 @@ from . 
import constants -class DoesNotExistError(Exception): +class DoesNotExist(Exception): pass -class UnknownRepositoryScheme(Exception): - def __init__(self, scheme): - if scheme == "": - message = "Missing repository scheme" - else: - message = "Unknown repository scheme: {}".format(scheme) - super().__init__( - message - + """. - -Make sure your repository URL starts with either 'file://', 's3://', or 'gs://'. -See the documentation for more details: {}""".format( - constants.YAML_REFERENCE_DOCS_URL - ) - ) +class ReadError(Exception): + pass -class ConfigNotFoundError(Exception): - def __init__(self, message): - message += """ +class WriteError(Exception): + pass -You must either create a replicate.yaml configuration file, or explicitly pass the arguments 'repository' and 'directory' to replicate.Project(). -For more information, see {}""".format( - constants.YAML_REFERENCE_DOCS_URL - ) - super().__init__(message) +class RepositoryConfigurationError(Exception): + pass -class NewerRepositoryVersion(Exception): - def __init__(self, repository_url): - message = """The repository at {} is using a newer storage mechanism which is incompatible with your version of Replicate. +class IncompatibleRepositoryVersion(Exception): + pass -To upgrade, run: -pip install --upgrade replicate -""".format( - repository_url - ) - super().__init__(message) +class CorruptedRepositorySpec(Exception): + pass -class CorruptedProjectSpec(Exception): - def __init__(self, path): - message = """The project spec file at {} is corrupted. 
-You can manually edit it with the format {"version": VERSION}, -where VERSION is an integer.""" - super().__init__(message) +class ConfigNotFound(Exception): + pass diff --git a/python/replicate/experiment.py b/python/replicate/experiment.py index 646fe90c..2bdb901b 100644 --- a/python/replicate/experiment.py +++ b/python/replicate/experiment.py @@ -1,8 +1,10 @@ try: # backport is incompatible with 3.7+, so we must use built-in from dataclasses import dataclass, InitVar, field + import dataclasses except ImportError: from ._vendor.dataclasses import dataclass, InitVar, field + from ._vendor import dataclasses # type: ignore import getpass import os import math @@ -23,41 +25,21 @@ ) from . import console -from .exceptions import DoesNotExistError, NewerRepositoryVersion from .checkpoint import ( Checkpoint, PrimaryMetric, CheckpointList, ) -from .hash import random_hash -from .heartbeat import Heartbeat, DEFAULT_REFRESH_INTERVAL -from .json import CustomJSONEncoder from .metadata import rfc3339_datetime, parse_rfc3339 from .packages import get_imported_packages from .system import get_python_version from .validate import check_path from .version import version -from .constants import ( - REPOSITORY_VERSION, - PYTHON_REFERENCE_DOCS_URL, - HEARTBEAT_MISS_TOLERANCE, - EXPERIMENT_STATUS_RUNNING, - EXPERIMENT_STATUS_STOPPED, -) if TYPE_CHECKING: from .project import Project -def experiment_fields_from_json(data: Dict[str, Any]) -> Dict[str, Any]: - data = data.copy() - data["created"] = parse_rfc3339(data["created"]) - data["checkpoints"] = CheckpointList( - [Checkpoint.from_json(d) for d in data.get("checkpoints", [])] - ) - return data - - @dataclass class Experiment: """ @@ -68,10 +50,10 @@ class Experiment: id: str created: datetime.datetime - user: str - host: str - command: str - config: dict + user: Optional[str] = None + host: Optional[str] = None + command: Optional[str] = None + config: Optional[dict] = None path: Optional[str] = None params: 
Optional[Dict[str, Any]] = None python_version: Optional[str] = None @@ -81,7 +63,6 @@ class Experiment: def __post_init__(self, project: "Project"): self._project = project - self._heartbeat = None self._step = -1 def short_id(self): @@ -123,8 +104,14 @@ def checkpoint( This saves the metrics at this point, and makes a copy of the file or directory passed to `path`, which could be weights or any other artifact. """ - # TODO(bfirsh): display warning if primary_metric changes in an experiment - # FIXME: store as tuple throughout for consistency? + # protobuf 3 doesn't have optionals, so path=None becomes "" + # and we have no way of differentiating between empty strings + # and Nones + if path == "": + raise ValueError( + "path cannot be an empty string. Please use path=None or omit path if you don't want to save any files." + ) + primary_metric_dict: Optional[PrimaryMetric] = None if primary_metric is not None: if len(primary_metric) != 2: @@ -144,80 +131,38 @@ def checkpoint( # Remember the current step self._step = step - checkpoint = Checkpoint( - id=random_hash(), - created=datetime.datetime.utcnow(), + checkpoint = self._project._daemon().create_checkpoint( + experiment=self, path=path, step=step, metrics=metrics, primary_metric=primary_metric_dict, + quiet=quiet, ) - if not quiet: - if path is None: - console.info("Creating checkpoint {}".format(checkpoint.short_id())) - else: - console.info( - "Creating checkpoint {}: copying '{}' to '{}'...".format( - checkpoint.short_id(), - checkpoint.path, - self._project._get_repository().root_url(), - ) - ) - - errors = checkpoint.validate() - if errors: - for error in errors: - console.error("Not saving checkpoint: " + error) - return checkpoint - - checkpoint._experiment = self - - # Upload files before writing metadata so if it is cancelled, there isn't metadata pointing at non-existent data - if checkpoint.path is not None: - tar_path = checkpoint._repository_tar_path() - repository = 
self._project._get_repository() - repository.put_path_tar(self._project.directory, tar_path, checkpoint.path) - self.checkpoints.append(checkpoint) - self.save() - - if self._heartbeat is not None: - self._heartbeat.ensure_running() - + self.save(quiet=quiet) return checkpoint - def save(self): + def save(self, quiet: bool): """ Save this experiment's metadata to repository. """ - repository = self._project._get_repository() - repository.put( - self._metadata_path(), - json.dumps(self.to_json(), indent=2, cls=CustomJSONEncoder), - ) + self._project._daemon().save_experiment(self, quiet=quiet) + return def refresh(self): """ Update this experiment with the latest data from the repository. """ - repository = self._project._get_repository() - data = json.loads( - repository.get("metadata/experiments/{}.json".format(self.id)) - ) - fields = experiment_fields_from_json(data) - for k, v in fields.items(): - setattr(self, k, v) + exp = self._project._daemon().get_experiment(experiment_id_prefix=self.id) + + for field in dataclasses.fields(exp): + if field.name != "project": + value = getattr(exp, field.name) + setattr(self, field.name, value) for chk in self.checkpoints: chk._experiment = self - @classmethod - def from_json(cls, project: "Project", data: Dict[str, Any]) -> "Experiment": - kwargs = experiment_fields_from_json(data) - experiment = Experiment(project=project, **kwargs) - for chk in experiment.checkpoints: - chk._experiment = experiment - return experiment - def to_json(self) -> Dict[str, Any]: return { "id": self.id, @@ -234,14 +179,6 @@ def to_json(self) -> Dict[str, Any]: "replicate_version": version, } - def start_heartbeat(self): - self._heartbeat = Heartbeat( - experiment_id=self.id, - repository_url=self._project._get_config()["repository"], - path=self._heartbeat_path(), - ) - self._heartbeat.start() - def stop(self): """ Stop an experiment. 
@@ -249,10 +186,7 @@ def stop(self): Experiments running in a script will eventually timeout, but when running in a notebook, you are required to call this method to mark an experiment as stopped. """ - if self._heartbeat is not None: - self._heartbeat.kill() - self._heartbeat = None - self._project._get_repository().delete(self._heartbeat_path()) + self._project._daemon().stop_experiment(self.id) def delete(self): """ @@ -260,18 +194,7 @@ def delete(self): """ # We should consolidate delete logic, see https://github.com/replicate/replicate/issues/332 # It's also slow https://github.com/replicate/replicate/issues/333 - repository = self._project._get_repository() - console.info( - "Deleting {} checkpoints in experiment {}".format( - len(self.checkpoints), self.short_id() - ) - ) - for checkpoint in self.checkpoints: - repository.delete(checkpoint._repository_tar_path()) - console.info("Deleting experiment: {}".format(self.short_id())) - repository.delete(self._heartbeat_path()) - repository.delete(self._repository_tar_path()) - repository.delete(self._metadata_path()) + self._project._daemon().delete_experiment(self.id) def latest(self) -> Optional[Checkpoint]: """ @@ -333,34 +256,7 @@ def is_running(self) -> bool: In case the heartbeat metadata file is not present which means the experiment was stopped the function returns False. 
""" - try: - repository = self._project._get_repository() - heartbeat_metadata_bytes = repository.get(self._heartbeat_path()) - heartbeat_metadata = json.loads(heartbeat_metadata_bytes) - except DoesNotExistError as e: - return False - except Exception as e: - console.warn( - "Failed to load heartbeat metadata from {}: {}".format( - self._heartbeat_path(), e - ) - ) - return False - now = datetime.datetime.utcnow() - last_heartbeat = parse_rfc3339(heartbeat_metadata["last_heartbeat"]) - last_tolerable_heartbeat = ( - now - DEFAULT_REFRESH_INTERVAL * HEARTBEAT_MISS_TOLERANCE - ) - return last_tolerable_heartbeat < last_heartbeat - - def _heartbeat_path(self) -> str: - return "metadata/heartbeats/{}.json".format(self.id) - - def _repository_tar_path(self) -> str: - return "experiments/{}.tar.gz".format(self.id) - - def _metadata_path(self) -> str: - return "metadata/experiments/{}.json".format(self.id) + return self._project._daemon().experiment_is_running(self.id) def primary_metric(self) -> str: """ @@ -470,103 +366,32 @@ class ExperimentCollection: def create( self, path=None, params=None, quiet=False, disable_heartbeat=False ) -> Experiment: - root_url = self.project._get_repository().root_url() - - # check that the project's repository version isn't - # higher than what this version of replicate can write. - # projects have to use a single consistent repository version. 
- project_spec = self.project._load_project_spec() - if project_spec is None: - self.project._write_project_spec(version=REPOSITORY_VERSION) - elif project_spec.version > REPOSITORY_VERSION: - raise NewerRepositoryVersion(root_url) - command = " ".join(map(shlex.quote, sys.argv)) - config = self.project._get_config() - experiment = Experiment( - project=self.project, - id=random_hash(), - created=datetime.datetime.utcnow(), + return self.project._daemon().create_experiment( path=path, params=params, - config=config, - user=os.getenv("REPLICATE_INTERNAL_USER", getpass.getuser()), - host=os.getenv("REPLICATE_INTERNAL_HOST", ""), - command=os.getenv("REPLICATE_INTERNAL_COMMAND", command), + command=command, python_version=get_python_version(), python_packages=get_imported_packages(), + quiet=quiet, + disable_hearbeat=disable_heartbeat, ) - if not quiet: - if path is None: - console.info("Creating experiment {}".format(experiment.short_id())) - else: - console.info( - "Creating experiment {}: copying '{}' to '{}'...".format( - experiment.short_id(), experiment.path, root_url, - ) - ) - - errors = experiment.validate() - if errors: - if len(errors) == 1: - s = [f"Could not create Replicate experiment: {errors[0]}"] - else: - s = ["Could not create Replicate experiment:"] - for error in errors: - s.append(f"- {error}") - s.append("") - s.append(f"For help, see the docs: {PYTHON_REFERENCE_DOCS_URL}") - raise ValueError("\n".join(s)) - - # Upload files before writing metadata so if it is cancelled, there isn't metadata pointing at non-existent data - if experiment.path is not None: - repository = self.project._get_repository() - tar_path = experiment._repository_tar_path() - repository.put_path_tar(self.project.directory, tar_path, experiment.path) - - experiment.save() - - if not disable_heartbeat: - experiment.start_heartbeat() - - return experiment - - def get(self, experiment_id) -> Experiment: + def get(self, experiment_id_prefix) -> Experiment: """ Returns the 
experiment with the given ID. """ - repository = self.project._get_repository() - ids = [] - for path in repository.list("metadata/experiments/"): - ids.append(os.path.basename(path).split(".")[0]) - - matching_ids = list(filter(lambda i: i.startswith(experiment_id), ids)) - if len(matching_ids) == 0: - raise DoesNotExistError( - "'{}' does not match any experiment IDs".format(experiment_id) - ) - elif len(matching_ids) > 1: - raise DoesNotExistError( - "'{}' is ambiguous - it matches {} experiment IDs".format( - experiment_id, len(matching_ids) - ) - ) - - data = json.loads( - repository.get("metadata/experiments/{}.json".format(matching_ids[0])) + return self.project._daemon().get_experiment( + experiment_id_prefix=experiment_id_prefix, ) - return Experiment.from_json(self.project, data) - def list(self, filter: Optional[Callable[[Any], bool]] = None) -> List[Experiment]: + def list(self, filter: Optional[Callable[[Any], bool]] = None) -> "ExperimentList": """ Return all experiments for a project, sorted by creation date. 
""" - repository = self.project._get_repository() + experiments = self.project._daemon().list_experiments() result: ExperimentList = ExperimentList() - for path in repository.list("metadata/experiments/"): - data = json.loads(repository.get(path)) - exp = Experiment.from_json(self.project, data) + for exp in experiments: if filter is not None: include = False try: diff --git a/python/replicate/heartbeat.py b/python/replicate/heartbeat.py deleted file mode 100644 index 6c7846cd..00000000 --- a/python/replicate/heartbeat.py +++ /dev/null @@ -1,66 +0,0 @@ -import sys -import datetime -import json -import time -from multiprocessing import Process - -from .repository import repository_for_url, Repository -from .metadata import rfc3339_datetime - - -DEFAULT_REFRESH_INTERVAL = datetime.timedelta(seconds=10) - - -class Heartbeat: - def __init__( - self, - experiment_id: str, - repository_url: str, - path: str, - refresh_interval: datetime.timedelta = DEFAULT_REFRESH_INTERVAL, - ): - self.experiment_id = experiment_id - self.repository_url = repository_url - self.path = path - self.refresh_interval = refresh_interval - self.process = self.make_process() - - def start(self): - self.process.start() - - def ensure_running(self): - if not self.is_alive(): - self.process = self.make_process() - self.process.start() - - def kill(self): - self.process.terminate() - - def is_alive(self): - return self.process.is_alive() - - def make_process(self) -> Process: - process = Process(target=self.heartbeat_loop) - process.daemon = True - return process - - def heartbeat_loop(self): - # need to instantitate repository here since the gcs - # client doesn't like multiprocessing: - # https://github.com/googleapis/google-cloud-python/issues/3501 - repository = repository_for_url(self.repository_url) - while True: - self.refresh(repository) - time.sleep(self.refresh_interval.total_seconds()) - - def refresh(self, repository: Repository): - obj = json.dumps( - { - "experiment_id": 
self.experiment_id, - "last_heartbeat": rfc3339_datetime(datetime.datetime.utcnow()), - } - ) - try: - repository.put(self.path, obj) - except Exception as e: # pylint: disable=broad-except - sys.stderr.write("Failed to save heartbeat: {}".format(e)) diff --git a/python/replicate/pb_convert.py b/python/replicate/pb_convert.py new file mode 100644 index 00000000..d90bfdb3 --- /dev/null +++ b/python/replicate/pb_convert.py @@ -0,0 +1,228 @@ +import datetime +import json +from typing import List, Dict, Any, Optional, MutableMapping + +from google.protobuf import timestamp_pb2 + +from .servicepb import replicate_pb2 as pb +from .experiment import Experiment +from .checkpoint import Checkpoint, PrimaryMetric, CheckpointList + +# We load numpy but not torch or tensorflow because numpy loads very fast and +# they're probably using it anyway +# fmt: off +try: + import numpy as np # type: ignore + has_numpy = True +except ImportError: + has_numpy = False +# fmt: on + +# Tensorflow takes a solid 10 seconds to import on a modern Macbook Pro, so instead of importing, +# do this instead +def _is_tensorflow_tensor(obj): + # e.g. 
__module__='tensorflow.python.framework.ops', __name__='EagerTensor' + return ( + obj.__class__.__module__.split(".")[0] == "tensorflow" + and "Tensor" in obj.__class__.__name__ + ) + + +def _is_torch_tensor(obj): + return (obj.__class__.__module__, obj.__class__.__name__) == ("torch", "Tensor") + + +def timestamp_from_pb(t: timestamp_pb2.Timestamp) -> datetime.datetime: + return datetime.datetime.fromtimestamp(t.seconds + t.nanos / 1e9) + + +def checkpoints_from_pb( + experiment: Experiment, + checkpoints_pb, # TODO(andreas): should be RepeatedCompositeFieldContainer[pb.Checkpoint], but that throws TypeError +) -> CheckpointList: + lst = CheckpointList() + for chk_pb in checkpoints_pb: + lst.append(checkpoint_from_pb(experiment, chk_pb)) + return lst + + +def checkpoint_from_pb(experiment: Experiment, chk_pb: pb.Checkpoint) -> Checkpoint: + chk = Checkpoint( + id=chk_pb.id, + created=timestamp_from_pb(chk_pb.created), + path=noneable(chk_pb.path), + step=chk_pb.step, + metrics=value_map_from_pb(chk_pb.metrics), + primary_metric=primary_metric_from_pb(chk_pb.primaryMetric), + ) + chk._experiment = experiment + return chk + + +def experiments_from_pb( + project, experiments_pb: List[pb.Experiment] +) -> List[Experiment]: + result: List[Experiment] = [] + for exp_pb in experiments_pb: + result.append(experiment_from_pb(project, exp_pb)) + return result + + +def experiment_from_pb(project, exp_pb: pb.Experiment) -> Experiment: + exp = Experiment( + project=project, + id=exp_pb.id, + created=timestamp_from_pb(exp_pb.created), + user=noneable(exp_pb.user), + host=noneable(exp_pb.host), + command=noneable(exp_pb.command), + config=config_from_pb(exp_pb.config), + path=noneable(exp_pb.path), + params=value_map_from_pb(exp_pb.params), + python_packages=noneable(exp_pb.pythonPackages), + python_version=noneable(exp_pb.pythonVersion), + replicate_version=noneable(exp_pb.replicateVersion), + ) + exp.checkpoints = checkpoints_from_pb(exp, exp_pb.checkpoints) + return exp + + 
+def config_from_pb(conf_pb: Optional[pb.Config]) -> Optional[Dict[str, Any]]: + if not conf_pb: + return None + if not conf_pb.repository and not conf_pb.storage: + return None + return {"repository": conf_pb.repository, "storage": conf_pb.storage} + + +def primary_metric_from_pb(pm_pb: pb.PrimaryMetric,) -> Optional[PrimaryMetric]: + if not pm_pb.name: + return None + if pm_pb.goal == pb.PrimaryMetric.Goal.MAXIMIZE: + goal = "maximize" + else: + goal = "minimize" + + return PrimaryMetric(name=pm_pb.name, goal=goal,) + + +def value_map_from_pb( + vm_pb: MutableMapping[str, pb.ParamType] +) -> Optional[Dict[str, Any]]: + if not vm_pb: + return None + return {k: value_from_pb(v) for k, v in vm_pb.items()} + + +def value_from_pb(value_pb: pb.ParamType) -> Any: + which = value_pb.WhichOneof("value") + if which == "boolValue": + return value_pb.boolValue + if which == "intValue": + return value_pb.intValue + if which == "floatValue": + return value_pb.floatValue + if which == "stringValue": + return value_pb.stringValue + if which == "objectValueJson": + return json.loads(value_pb.objectValueJson) + + +def timestamp_to_pb(t: datetime.datetime) -> timestamp_pb2.Timestamp: + return timestamp_pb2.Timestamp( + seconds=int(t.timestamp()), nanos=round((t.timestamp() % 1.0) * 1e9) + ) + + +def experiment_to_pb(exp: Experiment) -> pb.Experiment: + return pb.Experiment( + id=exp.id, + created=timestamp_to_pb(exp.created), + user=exp.user, + host=exp.host, + command=exp.command, + config=config_to_pb(exp.config), + path=exp.path, + params=value_map_to_pb(exp.params), + pythonPackages=exp.python_packages, + pythonVersion=exp.python_version, + replicateVersion=exp.replicate_version, + checkpoints=checkpoints_to_pb(exp.checkpoints), + ) + + +def config_to_pb(conf: Optional[Dict[str, Any]]) -> Optional[pb.Config]: + if conf is None: + return None + return pb.Config(repository=conf["repository"], storage=conf["storage"]) + + +def checkpoints_to_pb( + checkpoints: 
Optional[List[Checkpoint]], +) -> Optional[List[pb.Checkpoint]]: + if checkpoints is None: + return None + return [checkpoint_to_pb(chk) for chk in checkpoints] + + +def checkpoint_to_pb(chk: Checkpoint) -> pb.Checkpoint: + return pb.Checkpoint( + id=chk.id, + created=timestamp_to_pb(chk.created), + path=chk.path, + step=chk.step, + metrics=value_map_to_pb(chk.metrics), + primaryMetric=primary_metric_to_pb(chk.primary_metric), + ) + + +def value_map_to_pb(m: Optional[Dict[str, Any]]) -> Optional[Dict[str, pb.ParamType]]: + if m is None: + return None + return {k: value_to_pb(v) for k, v in m.items()} + + +def value_to_pb(v: Any) -> pb.ParamType: + if has_numpy: + if isinstance(v, np.integer): + return pb.ParamType(intValue=int(v)) + elif isinstance(v, np.floating): + return pb.ParamType(floatValue=float(v)) + elif isinstance(v, np.ndarray): + return pb.ParamType(objectValueJson=json.dumps(v.tolist())) + if _is_torch_tensor(v): + return pb.ParamType(objectValueJson=json.dumps(v.detach().tolist())) + if _is_tensorflow_tensor(v): + return pb.ParamType(objectValueJson=json.dumps(v.numpy().tolist())) + if isinstance(v, bool): + return pb.ParamType(boolValue=v) + if isinstance(v, int): + return pb.ParamType(intValue=v) + if isinstance(v, float): + return pb.ParamType(floatValue=v) + if isinstance(v, str): + return pb.ParamType(stringValue=v) + if isinstance(v, list): + return pb.ParamType(objectValueJson=json.dumps(v)) + if isinstance(v, dict): + return pb.ParamType(objectValueJson=json.dumps(v)) + if v is None: + return pb.ParamType(objectValueJson=json.dumps(v)) + else: + raise ValueError("Invalid value: %s", v) + + +def primary_metric_to_pb(pm: Optional[PrimaryMetric]) -> Optional[pb.PrimaryMetric]: + if pm is None: + return None + if pm["goal"] == "maximize": + goal = pb.PrimaryMetric.Goal.MAXIMIZE + else: + goal = pb.PrimaryMetric.Goal.MINIMIZE + return pb.PrimaryMetric(name=pm["name"], goal=goal) + + +def noneable(x: Any) -> Optional[Any]: + if not x: + return None 
+ return x diff --git a/python/replicate/project.py b/python/replicate/project.py index 362d5cb0..cdd682b4 100644 --- a/python/replicate/project.py +++ b/python/replicate/project.py @@ -8,10 +8,8 @@ import json from . import console -from .config import load_config +from .daemon import Daemon from .experiment import ExperimentCollection, Experiment -from .repository import repository_for_url, Repository -from .exceptions import ConfigNotFoundError, DoesNotExistError, CorruptedProjectSpec MAX_SEARCH_DEPTH = 100 @@ -43,87 +41,18 @@ def __init__( self, repository: Optional[str] = None, directory: Optional[str] = None ): # Project is initialized on import, so don't do anything slow or anything that will raise an exception - self._directory = directory - self._repository: Optional[Repository] = None - self._repository_url = repository - self._explicit_repository = repository is not None - - @property - def directory(self) -> str: - if self._directory is None: - if self._explicit_repository: - # we raise an error here rather than in the - # constructor, because Projects can be used both - # for writing during training and for analysis. - # during analysis you don't need a root directory - - raise ValueError( - "If you pass the 'repository' argument to Project(), you also need to pass 'directory'" - ) - - self._directory = get_project_dir() - return self._directory - - def _get_config(self) -> Dict[str, Any]: - if self._explicit_repository: - return {"repository": self._repository_url} - - try: - return load_config(self.directory) - except ConfigNotFoundError: - # backwards-compatibility - # TODO(bfirsh): remove this at some point - if os.path.exists(os.path.join(self.directory, DEPRECATED_REPOSITORY_DIR)): - console.warn( - f"""replicate.yaml is now required. 
Create replicate.yaml with this content: - - repository: "file://{DEPRECATED_REPOSITORY_DIR}" -""" - ) - return {"repository": "file://" + DEPRECATED_REPOSITORY_DIR} - raise - - def _get_repository(self) -> Repository: - reload_repository = self._repository is None - if self._repository_url is not None: - config = self._get_config() - if config["repository"] != self._repository_url: - reload_repository = True - self._repository_url = config["repository"] - - if reload_repository: - if self._repository_url is None: - config = self._get_config() - self._repository_url = config["repository"] - - self._repository = repository_for_url(self._repository_url) - - return self._repository # type: ignore + self.directory = directory + self.repository = repository + self._daemon_instance: Optional[Daemon] = None @property def experiments(self) -> ExperimentCollection: return ExperimentCollection(self) - def _load_project_spec(self) -> Optional[ProjectSpec]: - repo = self._get_repository() - try: - raw = repo.get(self._project_spec_path()) - except DoesNotExistError as e: - return None - try: - data = json.loads(raw) - return ProjectSpec.from_json(data) - except (json.JSONDecodeError, TypeError): - raise CorruptedProjectSpec( - repo.root_url() + "/" + self._project_spec_path() - ) - - def _write_project_spec(self, version: int): - spec = ProjectSpec(version=version) - self._get_repository().put(self._project_spec_path(), spec.to_json()) - - def _project_spec_path(self) -> str: - return "repository.json" + def _daemon(self) -> Daemon: + if self._daemon_instance is None: + self._daemon_instance = Daemon(self) + return self._daemon_instance def init( @@ -138,30 +67,3 @@ def init( return project.experiments.create( path=path, params=params, disable_heartbeat=disable_heartbeat ) - - -def get_project_dir() -> str: - """ - Returns the directory of the current project. - - Similar to config.FindConfigPath() in CLI. 
- """ - cwd = os.getcwd() - directory = cwd - for _ in range(MAX_SEARCH_DEPTH): - for filename in ["replicate.yaml", "replicate.yml"]: - if os.path.exists(os.path.join(directory, filename)): - return directory - - # backwards-compatibility - if os.path.exists(os.path.join(directory, DEPRECATED_REPOSITORY_DIR)): - return directory - - if directory == "/": - raise ConfigNotFoundError( - "replicate.yaml was not found in {} or any of its subdirectories".format( - cwd - ) - ) - directory = os.path.dirname(directory) - return os.getcwd() diff --git a/python/replicate/repository/__init__.py b/python/replicate/repository/__init__.py deleted file mode 100644 index 5b93f01b..00000000 --- a/python/replicate/repository/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -from .common import repository_for_url -from .repository_base import Repository diff --git a/python/replicate/repository/common.py b/python/replicate/repository/common.py deleted file mode 100644 index 2a8d73c1..00000000 --- a/python/replicate/repository/common.py +++ /dev/null @@ -1,28 +0,0 @@ -from urllib.parse import urlparse - -from .repository_base import Repository -from .disk_repository import DiskRepository -from .gcs_repository import GCSRepository -from .s3_repository import S3Repository - -from ..exceptions import UnknownRepositoryScheme - - -def repository_for_url(url: str) -> Repository: - parsed_url = urlparse(url) - - if parsed_url.scheme == "" or parsed_url.scheme is None: - raise UnknownRepositoryScheme(scheme="") - - if parsed_url.scheme == "file": - # don't use os.path.join() here because path starts with "/" and join will treat that as root URL - return DiskRepository(root=parsed_url.netloc + parsed_url.path) - elif parsed_url.scheme == "s3": - # lazy import to speed up import replicate - - return S3Repository(bucket=parsed_url.netloc, root=parsed_url.path.lstrip("/")) - elif parsed_url.scheme == "gs": - - return GCSRepository(bucket=parsed_url.netloc, root=parsed_url.path.lstrip("/")) - else: - 
raise UnknownRepositoryScheme(parsed_url.scheme) diff --git a/python/replicate/repository/disk_repository.py b/python/replicate/repository/disk_repository.py deleted file mode 100644 index fd71ebab..00000000 --- a/python/replicate/repository/disk_repository.py +++ /dev/null @@ -1,120 +0,0 @@ -import os -from typing import AnyStr, List - -from .repository_base import Repository -from .. import shared -from ..exceptions import DoesNotExistError - - -class DiskRepository(Repository): - """ - Stores data on local filesystem - - Unlike the remote repositorys, some of these methods are implemented natively - because they're trivial. The complex and slow ones (e.g. put_path) we call Go. - """ - - def __init__(self, root): - self.root = root - - def root_url(self): - """ - Returns the path this repository is pointing at - """ - return self.root - - def get(self, path: str) -> bytes: - """ - Get data at path - """ - full_path = os.path.join(self.root, path) - try: - with open(full_path, "rb") as fh: - return fh.read() - except FileNotFoundError: - raise DoesNotExistError("No such path: '{}'".format(full_path)) - - def put(self, path: str, data: AnyStr): - """ - Save data to file at path - """ - full_path = os.path.join(self.root, path) - os.makedirs(os.path.dirname(full_path), exist_ok=True) - - mode = "w" - if isinstance(data, bytes): - mode = "wb" - with open(full_path, mode) as fh: - fh.write(data) - - def put_path(self, source_path: str, dest_path: str): - """ - Save file or directory to path - """ - shared.call( - "DiskRepository.PutPath", - Root=self.root, - Src=str(source_path), - Dest=str(dest_path), - ) - - def put_path_tar(self, local_path: str, tar_path: str, include_path: str): - """ - Save file or directory to tarball - """ - shared.call( - "DiskRepository.PutPathTar", - Root=self.root, - LocalPath=str(local_path), - TarPath=str(tar_path), - IncludePath=str(include_path), - ) - - def list(self, path: str) -> List[str]: - """ - Returns a list of files at path, 
but not any subdirectories. - - Returned paths are prefixed with the given path, that can be passed straight to Get(). - Directories are not listed. - If path does not exist, an empty list will be returned. - """ - full_path = os.path.join(self.root, path) - if not os.path.exists(full_path): - return [] - - result: List[str] = [] - for filename in os.listdir(full_path): - if os.path.isfile(os.path.join(full_path, filename)): - result.append(os.path.join(path, filename)) - return result - - def delete(self, path: str): - """ - Recursively delete path - """ - # Even though it's a simple operation we use the shared - # library to ensure consistent semantics. - shared.call( - "DiskRepository.Delete", - Root=self.root, - Path=str(path), # typecast for pathlib - ) - - def get_path_tar(self, tar_path: str, local_path: str): - """ - Extracts tarball from tar_path to local_path. - The first component of the tarball is stripped. E.g. - extracting a tarball with `abc123/weights` in it to - `/code` would create `/code/weights`. - """ - try: - shared.call( - "DiskRepository.GetPathTar", - Root=self.root, - TarPath=str(tar_path), - LocalPath=str(local_path), - ) - except shared.SharedError as e: - if e.type == "DoesNotExistError": - raise DoesNotExistError(e.message) - raise diff --git a/python/replicate/repository/gcs_repository.py b/python/replicate/repository/gcs_repository.py deleted file mode 100644 index 0bdbff42..00000000 --- a/python/replicate/repository/gcs_repository.py +++ /dev/null @@ -1,121 +0,0 @@ -from typing import AnyStr, List - -from .repository_base import Repository -from .. 
import shared -from ..exceptions import DoesNotExistError - - -class GCSRepository(Repository): - def __init__(self, bucket: str, root: str): - self.bucket_name = bucket - self.root = root - - def root_url(self): - """ - Returns the URL this repository is pointing at - """ - ret = "gs://" + self.bucket_name - if self.root: - ret += "/" + self.root - return ret - - def get(self, path: str) -> bytes: - """ - Get data at path - """ - try: - result = shared.call( - "GCSRepository.Get", - Bucket=self.bucket_name, - Root=self.root, - Path=str(path), # typecast for pathlib - ) - except shared.SharedError as e: - if e.type == "DoesNotExistError": - raise DoesNotExistError(e.message) - raise - return result["Data"] - - def put_path(self, source_path: str, dest_path: str): - """ - Save file or directory to path - """ - shared.call( - "GCSRepository.PutPath", - Bucket=self.bucket_name, - Root=self.root, - Src=str(source_path), - Dest=str(dest_path), - ) - - def put_path_tar(self, local_path: str, tar_path: str, include_path: str): - """ - Save file or directory to tarball - """ - shared.call( - "GCSRepository.PutPathTar", - Bucket=self.bucket_name, - Root=self.root, - LocalPath=str(local_path), - TarPath=str(tar_path), - IncludePath=str(include_path), - ) - - def put(self, path: str, data: AnyStr): - """ - Save data to file at path - """ - if isinstance(data, str): - data_bytes = data.encode("utf-8") - else: - data_bytes = data - shared.call( - "GCSRepository.Put", - Bucket=self.bucket_name, - Root=self.root, - Path=str(path), - Data=data_bytes, - ) - - def list(self, path: str) -> List[str]: - """ - Returns a list of files at path, but not any subdirectories. 
- """ - result = shared.call( - "GCSRepository.List", - Bucket=self.bucket_name, - Root=self.root, - Path=str(path), # typecast for pathlib - ) - return result["Paths"] - - def delete(self, path: str): - """ - Recursively delete path - """ - shared.call( - "GCSRepository.Delete", - Bucket=self.bucket_name, - Root=self.root, - Path=str(path), # typecast for pathlib - ) - - def get_path_tar(self, tar_path: str, local_path: str): - """ - Extracts tarball from tar_path to local_path. - The first component of the tarball is stripped. E.g. - extracting a tarball with `abc123/weights` in it to - `/code` would create `/code/weights`. - """ - try: - shared.call( - "GCSRepository.GetPathTar", - Bucket=self.bucket_name, - Root=self.root, - TarPath=str(tar_path), - LocalPath=str(local_path), - ) - except shared.SharedError as e: - if e.type == "DoesNotExistError": - raise DoesNotExistError(e.message) - raise diff --git a/python/replicate/repository/repository_base.py b/python/replicate/repository/repository_base.py deleted file mode 100644 index b40755d6..00000000 --- a/python/replicate/repository/repository_base.py +++ /dev/null @@ -1,63 +0,0 @@ -from abc import ABCMeta, abstractmethod -from typing import AnyStr, List - - -class Repository: - __metaclass__ = ABCMeta - - @abstractmethod - def root_url(self) -> str: - """ - Returns the path or URL this repository is pointing at - """ - raise NotImplementedError() - - @abstractmethod - def get(self, path: str) -> bytes: - """ - Get data at path - """ - raise NotImplementedError() - - @abstractmethod - def put(self, path: str, data: AnyStr): - """ - Save data to file at path - """ - raise NotImplementedError() - - def put_path(self, source_path: str, dest_path: str): - """ - Save file or directory to path on repository - """ - raise NotImplementedError() - - def put_path_tar(self, local_path: str, tar_path: str, include_path: str): - """ - Save local file or directory to tar.gz file on repository. 
- """ - raise NotImplementedError() - - @abstractmethod - def get_path_tar(self, tar_path: str, local_path: str): - """ - Extracts tarball from tar_path to local_path. - The first component of the tarball is stripped. E.g. - extracting a tarball with `abc123/weights` in it to - `/code` would create `/code/weights`. - """ - raise NotImplementedError() - - @abstractmethod - def list(self, path: str) -> List[str]: - """ - List files at path - """ - raise NotImplementedError() - - @abstractmethod - def delete(self, path: str): - """ - Delete single file at path - """ - raise NotImplementedError() diff --git a/python/replicate/repository/s3_repository.py b/python/replicate/repository/s3_repository.py deleted file mode 100644 index fb54e82c..00000000 --- a/python/replicate/repository/s3_repository.py +++ /dev/null @@ -1,128 +0,0 @@ -from typing import AnyStr, List - -from .repository_base import Repository -from .. import shared -from ..exceptions import DoesNotExistError - - -class S3Repository(Repository): - """ - Stores data on Amazon S3 - """ - - def __init__(self, bucket: str, root: str): - self.bucket_name = bucket - self.root = root - - def root_url(self): - """ - Returns the URL this repository is pointing at - """ - ret = "s3://" + self.bucket_name - if self.root: - ret += "/" + self.root - return ret - - def get(self, path: str) -> bytes: - """ - Get data at path - """ - try: - result = shared.call( - "S3Repository.Get", - Bucket=self.bucket_name, - Root=self.root, - Path=str(path), # typecast for pathlib - ) - except shared.SharedError as e: - if e.type == "DoesNotExistError": - raise DoesNotExistError(e.message) - raise - return result["Data"] - - def put_path(self, source_path: str, dest_path: str): - """ - Save directory to path - """ - shared.call( - "S3Repository.PutPath", - Bucket=self.bucket_name, - Root=self.root, - Src=str(source_path), - Dest=str(dest_path), - ) - - def put_path_tar(self, local_path: str, tar_path: str, include_path: str): - """ - 
Save file or directory to tarball - """ - shared.call( - "S3Repository.PutPathTar", - Bucket=self.bucket_name, - Root=self.root, - LocalPath=str(local_path), - TarPath=str(tar_path), - IncludePath=str(include_path), - ) - - def put(self, path: str, data: AnyStr): - """ - Save data to file at path - """ - if isinstance(data, str): - data_bytes = data.encode("utf-8") - else: - data_bytes = data - shared.call( - "S3Repository.Put", - Bucket=self.bucket_name, - Root=self.root, - Path=str(path), - Data=data_bytes, - ) - - def list(self, path: str) -> List[str]: - """ - Returns a list of files at path, but not any subdirectories. - """ - result = shared.call( - "S3Repository.List", - Bucket=self.bucket_name, - Root=self.root, - Path=str(path), # typecast for pathlib - ) - return result["Paths"] - - def exists(self, path: str) -> bool: - pass - - def delete(self, path: str): - """ - Recursively delete path - """ - shared.call( - "S3Repository.Delete", - Bucket=self.bucket_name, - Root=self.root, - Path=str(path), # typecast for pathlib - ) - - def get_path_tar(self, tar_path: str, local_path: str): - """ - Extracts tarball from tar_path to local_path. - The first component of the tarball is stripped. E.g. - extracting a tarball with `abc123/weights` in it to - `/code` would create `/code/weights`. 
- """ - try: - shared.call( - "S3Repository.GetPathTar", - Bucket=self.bucket_name, - Root=self.root, - TarPath=str(tar_path), - LocalPath=str(local_path), - ) - except shared.SharedError as e: - if e.type == "DoesNotExistError": - raise DoesNotExistError(e.message) - raise diff --git a/python/tests/repository/__init__.py b/python/replicate/servicepb/__init__.py similarity index 100% rename from python/tests/repository/__init__.py rename to python/replicate/servicepb/__init__.py diff --git a/python/replicate/servicepb/replicate_pb2.py b/python/replicate/servicepb/replicate_pb2.py new file mode 100644 index 00000000..4bc01163 --- /dev/null +++ b/python/replicate/servicepb/replicate_pb2.py @@ -0,0 +1,1465 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: replicate.proto +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='replicate.proto', + package='service', + syntax='proto3', + serialized_options=b'Z/github.com/replicate/replicate/go/pkg/servicepb', + create_key=_descriptor._internal_create_key, + serialized_pb=b'\n\x0freplicate.proto\x12\x07service\x1a\x1fgoogle/protobuf/timestamp.proto\"k\n\x17\x43reateExperimentRequest\x12\'\n\nexperiment\x18\x01 \x01(\x0b\x32\x13.service.Experiment\x12\x18\n\x10\x64isableHeartbeat\x18\x02 \x01(\x08\x12\r\n\x05quiet\x18\x03 \x01(\x08\"@\n\x15\x43reateExperimentReply\x12\'\n\nexperiment\x18\x01 \x01(\x0b\x32\x13.service.Experiment\"Q\n\x17\x43reateCheckpointRequest\x12\'\n\ncheckpoint\x18\x01 \x01(\x0b\x32\x13.service.Checkpoint\x12\r\n\x05quiet\x18\x02 
\x01(\x08\"@\n\x15\x43reateCheckpointReply\x12\'\n\ncheckpoint\x18\x01 \x01(\x0b\x32\x13.service.Checkpoint\"O\n\x15SaveExperimentRequest\x12\'\n\nexperiment\x18\x01 \x01(\x0b\x32\x13.service.Experiment\x12\r\n\x05quiet\x18\x02 \x01(\x08\">\n\x13SaveExperimentReply\x12\'\n\nexperiment\x18\x01 \x01(\x0b\x32\x13.service.Experiment\"-\n\x15StopExperimentRequest\x12\x14\n\x0c\x65xperimentID\x18\x01 \x01(\t\"\x15\n\x13StopExperimentReply\"2\n\x14GetExperimentRequest\x12\x1a\n\x12\x65xperimentIDPrefix\x18\x01 \x01(\t\"=\n\x12GetExperimentReply\x12\'\n\nexperiment\x18\x01 \x01(\x0b\x32\x13.service.Experiment\"\x18\n\x16ListExperimentsRequest\"@\n\x14ListExperimentsReply\x12(\n\x0b\x65xperiments\x18\x01 \x03(\x0b\x32\x13.service.Experiment\"/\n\x17\x44\x65leteExperimentRequest\x12\x14\n\x0c\x65xperimentID\x18\x01 \x01(\t\"\x17\n\x15\x44\x65leteExperimentReply\"_\n\x19\x43heckoutCheckpointRequest\x12\x1a\n\x12\x63heckpointIDPrefix\x18\x01 \x01(\t\x12\x17\n\x0foutputDirectory\x18\x02 \x01(\t\x12\r\n\x05quiet\x18\x03 \x01(\x08\"\x19\n\x17\x43heckoutCheckpointReply\"2\n\x1aGetExperimentStatusRequest\x12\x14\n\x0c\x65xperimentID\x18\x01 \x01(\t\"x\n\x18GetExperimentStatusReply\x12\x38\n\x06status\x18\x01 \x01(\x0e\x32(.service.GetExperimentStatusReply.Status\"\"\n\x06Status\x12\x0b\n\x07RUNNING\x10\x00\x12\x0b\n\x07STOPPED\x10\x01\"\xe8\x03\n\nExperiment\x12\n\n\x02id\x18\x01 \x01(\t\x12+\n\x07\x63reated\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x06params\x18\x03 \x03(\x0b\x32\x1f.service.Experiment.ParamsEntry\x12\x0c\n\x04host\x18\x04 \x01(\t\x12\x0c\n\x04user\x18\x05 \x01(\t\x12\x1f\n\x06\x63onfig\x18\x06 \x01(\x0b\x32\x0f.service.Config\x12\x0f\n\x07\x63ommand\x18\x07 \x01(\t\x12\x0c\n\x04path\x18\x08 \x01(\t\x12?\n\x0epythonPackages\x18\t \x03(\x0b\x32\'.service.Experiment.PythonPackagesEntry\x12\x15\n\rpythonVersion\x18\n \x01(\t\x12(\n\x0b\x63heckpoints\x18\x0b \x03(\x0b\x32\x13.service.Checkpoint\x12\x18\n\x10replicateVersion\x18\x0c 
\x01(\t\x1a\x41\n\x0bParamsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12!\n\x05value\x18\x02 \x01(\x0b\x32\x12.service.ParamType:\x02\x38\x01\x1a\x35\n\x13PythonPackagesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"-\n\x06\x43onfig\x12\x12\n\nrepository\x18\x01 \x01(\t\x12\x0f\n\x07storage\x18\x02 \x01(\t\"\x87\x02\n\nCheckpoint\x12\n\n\x02id\x18\x01 \x01(\t\x12+\n\x07\x63reated\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\x07metrics\x18\x03 \x03(\x0b\x32 .service.Checkpoint.MetricsEntry\x12\x0c\n\x04step\x18\x04 \x01(\x03\x12\x0c\n\x04path\x18\x05 \x01(\t\x12-\n\rprimaryMetric\x18\x06 \x01(\x0b\x32\x16.service.PrimaryMetric\x1a\x42\n\x0cMetricsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12!\n\x05value\x18\x02 \x01(\x0b\x32\x12.service.ParamType:\x02\x38\x01\"l\n\rPrimaryMetric\x12\x0c\n\x04name\x18\x01 \x01(\t\x12)\n\x04goal\x18\x02 \x01(\x0e\x32\x1b.service.PrimaryMetric.Goal\"\"\n\x04Goal\x12\x0c\n\x08MAXIMIZE\x10\x00\x12\x0c\n\x08MINIMIZE\x10\x01\"\x85\x01\n\tParamType\x12\x13\n\tboolValue\x18\x01 \x01(\x08H\x00\x12\x12\n\x08intValue\x18\x02 \x01(\x03H\x00\x12\x14\n\nfloatValue\x18\x03 \x01(\x01H\x00\x12\x15\n\x0bstringValue\x18\x04 \x01(\tH\x00\x12\x19\n\x0fobjectValueJson\x18\x05 \x01(\tH\x00\x42\x07\n\x05value2\x97\x06\n\x06\x44\x61\x65mon\x12V\n\x10\x43reateExperiment\x12 .service.CreateExperimentRequest\x1a\x1e.service.CreateExperimentReply\"\x00\x12V\n\x10\x43reateCheckpoint\x12 .service.CreateCheckpointRequest\x1a\x1e.service.CreateCheckpointReply\"\x00\x12P\n\x0eSaveExperiment\x12\x1e.service.SaveExperimentRequest\x1a\x1c.service.SaveExperimentReply\"\x00\x12P\n\x0eStopExperiment\x12\x1e.service.StopExperimentRequest\x1a\x1c.service.StopExperimentReply\"\x00\x12M\n\rGetExperiment\x12\x1d.service.GetExperimentRequest\x1a\x1b.service.GetExperimentReply\"\x00\x12S\n\x0fListExperiments\x12\x1f.service.ListExperimentsRequest\x1a\x1d.service.ListExperimentsReply\"\x00\x12V\n\x10\x44\x65leteExperiment\x12 
.service.DeleteExperimentRequest\x1a\x1e.service.DeleteExperimentReply\"\x00\x12\\\n\x12\x43heckoutCheckpoint\x12\".service.CheckoutCheckpointRequest\x1a .service.CheckoutCheckpointReply\"\x00\x12_\n\x13GetExperimentStatus\x12#.service.GetExperimentStatusRequest\x1a!.service.GetExperimentStatusReply\"\x00\x42\x31Z/github.com/replicate/replicate/go/pkg/servicepbb\x06proto3' + , + dependencies=[google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) + + + +_GETEXPERIMENTSTATUSREPLY_STATUS = _descriptor.EnumDescriptor( + name='Status', + full_name='service.GetExperimentStatusReply.Status', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name='RUNNING', index=0, number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='STOPPED', index=1, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + serialized_start=1143, + serialized_end=1177, +) +_sym_db.RegisterEnumDescriptor(_GETEXPERIMENTSTATUSREPLY_STATUS) + +_PRIMARYMETRIC_GOAL = _descriptor.EnumDescriptor( + name='Goal', + full_name='service.PrimaryMetric.Goal', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name='MAXIMIZE', index=0, number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='MINIMIZE', index=1, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + serialized_start=2057, + serialized_end=2091, +) +_sym_db.RegisterEnumDescriptor(_PRIMARYMETRIC_GOAL) + + +_CREATEEXPERIMENTREQUEST = _descriptor.Descriptor( + name='CreateExperimentRequest', + 
full_name='service.CreateExperimentRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='experiment', full_name='service.CreateExperimentRequest.experiment', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='disableHeartbeat', full_name='service.CreateExperimentRequest.disableHeartbeat', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='quiet', full_name='service.CreateExperimentRequest.quiet', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=61, + serialized_end=168, +) + + +_CREATEEXPERIMENTREPLY = _descriptor.Descriptor( + name='CreateExperimentReply', + full_name='service.CreateExperimentReply', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='experiment', full_name='service.CreateExperimentReply.experiment', index=0, + number=1, type=11, cpp_type=10, label=1, + 
has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=170, + serialized_end=234, +) + + +_CREATECHECKPOINTREQUEST = _descriptor.Descriptor( + name='CreateCheckpointRequest', + full_name='service.CreateCheckpointRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='checkpoint', full_name='service.CreateCheckpointRequest.checkpoint', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='quiet', full_name='service.CreateCheckpointRequest.quiet', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=236, + serialized_end=317, +) + + +_CREATECHECKPOINTREPLY = _descriptor.Descriptor( + name='CreateCheckpointReply', + full_name='service.CreateCheckpointReply', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + 
_descriptor.FieldDescriptor( + name='checkpoint', full_name='service.CreateCheckpointReply.checkpoint', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=319, + serialized_end=383, +) + + +_SAVEEXPERIMENTREQUEST = _descriptor.Descriptor( + name='SaveExperimentRequest', + full_name='service.SaveExperimentRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='experiment', full_name='service.SaveExperimentRequest.experiment', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='quiet', full_name='service.SaveExperimentRequest.quiet', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=385, + serialized_end=464, +) + + +_SAVEEXPERIMENTREPLY = _descriptor.Descriptor( + name='SaveExperimentReply', + full_name='service.SaveExperimentReply', + 
filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='experiment', full_name='service.SaveExperimentReply.experiment', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=466, + serialized_end=528, +) + + +_STOPEXPERIMENTREQUEST = _descriptor.Descriptor( + name='StopExperimentRequest', + full_name='service.StopExperimentRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='experimentID', full_name='service.StopExperimentRequest.experimentID', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=530, + serialized_end=575, +) + + +_STOPEXPERIMENTREPLY = _descriptor.Descriptor( + name='StopExperimentReply', + full_name='service.StopExperimentReply', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + 
extension_ranges=[], + oneofs=[ + ], + serialized_start=577, + serialized_end=598, +) + + +_GETEXPERIMENTREQUEST = _descriptor.Descriptor( + name='GetExperimentRequest', + full_name='service.GetExperimentRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='experimentIDPrefix', full_name='service.GetExperimentRequest.experimentIDPrefix', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=600, + serialized_end=650, +) + + +_GETEXPERIMENTREPLY = _descriptor.Descriptor( + name='GetExperimentReply', + full_name='service.GetExperimentReply', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='experiment', full_name='service.GetExperimentReply.experiment', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=652, + serialized_end=713, +) + + +_LISTEXPERIMENTSREQUEST = _descriptor.Descriptor( + name='ListExperimentsRequest', + full_name='service.ListExperimentsRequest', + filename=None, + 
file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=715, + serialized_end=739, +) + + +_LISTEXPERIMENTSREPLY = _descriptor.Descriptor( + name='ListExperimentsReply', + full_name='service.ListExperimentsReply', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='experiments', full_name='service.ListExperimentsReply.experiments', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=741, + serialized_end=805, +) + + +_DELETEEXPERIMENTREQUEST = _descriptor.Descriptor( + name='DeleteExperimentRequest', + full_name='service.DeleteExperimentRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='experimentID', full_name='service.DeleteExperimentRequest.experimentID', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + 
extension_ranges=[], + oneofs=[ + ], + serialized_start=807, + serialized_end=854, +) + + +_DELETEEXPERIMENTREPLY = _descriptor.Descriptor( + name='DeleteExperimentReply', + full_name='service.DeleteExperimentReply', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=856, + serialized_end=879, +) + + +_CHECKOUTCHECKPOINTREQUEST = _descriptor.Descriptor( + name='CheckoutCheckpointRequest', + full_name='service.CheckoutCheckpointRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='checkpointIDPrefix', full_name='service.CheckoutCheckpointRequest.checkpointIDPrefix', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='outputDirectory', full_name='service.CheckoutCheckpointRequest.outputDirectory', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='quiet', full_name='service.CheckoutCheckpointRequest.quiet', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + 
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=881, + serialized_end=976, +) + + +_CHECKOUTCHECKPOINTREPLY = _descriptor.Descriptor( + name='CheckoutCheckpointReply', + full_name='service.CheckoutCheckpointReply', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=978, + serialized_end=1003, +) + + +_GETEXPERIMENTSTATUSREQUEST = _descriptor.Descriptor( + name='GetExperimentStatusRequest', + full_name='service.GetExperimentStatusRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='experimentID', full_name='service.GetExperimentStatusRequest.experimentID', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1005, + serialized_end=1055, +) + + +_GETEXPERIMENTSTATUSREPLY = _descriptor.Descriptor( + name='GetExperimentStatusReply', + full_name='service.GetExperimentStatusReply', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='status', 
full_name='service.GetExperimentStatusReply.status', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _GETEXPERIMENTSTATUSREPLY_STATUS, + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1057, + serialized_end=1177, +) + + +_EXPERIMENT_PARAMSENTRY = _descriptor.Descriptor( + name='ParamsEntry', + full_name='service.Experiment.ParamsEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='service.Experiment.ParamsEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='value', full_name='service.Experiment.ParamsEntry.value', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=b'8\001', + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1548, + serialized_end=1613, +) + +_EXPERIMENT_PYTHONPACKAGESENTRY = _descriptor.Descriptor( + name='PythonPackagesEntry', + full_name='service.Experiment.PythonPackagesEntry', 
+ filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='service.Experiment.PythonPackagesEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='value', full_name='service.Experiment.PythonPackagesEntry.value', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=b'8\001', + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1615, + serialized_end=1668, +) + +_EXPERIMENT = _descriptor.Descriptor( + name='Experiment', + full_name='service.Experiment', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='id', full_name='service.Experiment.id', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='created', full_name='service.Experiment.created', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='params', full_name='service.Experiment.params', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='host', full_name='service.Experiment.host', index=3, + number=4, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='user', full_name='service.Experiment.user', index=4, + number=5, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='config', full_name='service.Experiment.config', index=5, + number=6, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='command', full_name='service.Experiment.command', index=6, + number=7, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + 
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='path', full_name='service.Experiment.path', index=7, + number=8, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='pythonPackages', full_name='service.Experiment.pythonPackages', index=8, + number=9, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='pythonVersion', full_name='service.Experiment.pythonVersion', index=9, + number=10, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='checkpoints', full_name='service.Experiment.checkpoints', index=10, + number=11, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='replicateVersion', full_name='service.Experiment.replicateVersion', index=11, + number=12, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + 
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[_EXPERIMENT_PARAMSENTRY, _EXPERIMENT_PYTHONPACKAGESENTRY, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1180, + serialized_end=1668, +) + + +_CONFIG = _descriptor.Descriptor( + name='Config', + full_name='service.Config', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='repository', full_name='service.Config.repository', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='storage', full_name='service.Config.storage', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1670, + serialized_end=1715, +) + + +_CHECKPOINT_METRICSENTRY = _descriptor.Descriptor( + name='MetricsEntry', + full_name='service.Checkpoint.MetricsEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='service.Checkpoint.MetricsEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, 
default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='value', full_name='service.Checkpoint.MetricsEntry.value', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=b'8\001', + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1915, + serialized_end=1981, +) + +_CHECKPOINT = _descriptor.Descriptor( + name='Checkpoint', + full_name='service.Checkpoint', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='id', full_name='service.Checkpoint.id', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='created', full_name='service.Checkpoint.created', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='metrics', full_name='service.Checkpoint.metrics', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, 
default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='step', full_name='service.Checkpoint.step', index=3, + number=4, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='path', full_name='service.Checkpoint.path', index=4, + number=5, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='primaryMetric', full_name='service.Checkpoint.primaryMetric', index=5, + number=6, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[_CHECKPOINT_METRICSENTRY, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1718, + serialized_end=1981, +) + + +_PRIMARYMETRIC = _descriptor.Descriptor( + name='PrimaryMetric', + full_name='service.PrimaryMetric', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='service.PrimaryMetric.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, 
default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='goal', full_name='service.PrimaryMetric.goal', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _PRIMARYMETRIC_GOAL, + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1983, + serialized_end=2091, +) + + +_PARAMTYPE = _descriptor.Descriptor( + name='ParamType', + full_name='service.ParamType', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='boolValue', full_name='service.ParamType.boolValue', index=0, + number=1, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='intValue', full_name='service.ParamType.intValue', index=1, + number=2, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='floatValue', full_name='service.ParamType.floatValue', index=2, + number=3, type=1, cpp_type=5, label=1, + has_default_value=False, 
default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='stringValue', full_name='service.ParamType.stringValue', index=3, + number=4, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='objectValueJson', full_name='service.ParamType.objectValueJson', index=4, + number=5, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='value', full_name='service.ParamType.value', + index=0, containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[]), + ], + serialized_start=2094, + serialized_end=2227, +) + +_CREATEEXPERIMENTREQUEST.fields_by_name['experiment'].message_type = _EXPERIMENT +_CREATEEXPERIMENTREPLY.fields_by_name['experiment'].message_type = _EXPERIMENT +_CREATECHECKPOINTREQUEST.fields_by_name['checkpoint'].message_type = _CHECKPOINT +_CREATECHECKPOINTREPLY.fields_by_name['checkpoint'].message_type = _CHECKPOINT +_SAVEEXPERIMENTREQUEST.fields_by_name['experiment'].message_type = _EXPERIMENT +_SAVEEXPERIMENTREPLY.fields_by_name['experiment'].message_type = _EXPERIMENT +_GETEXPERIMENTREPLY.fields_by_name['experiment'].message_type = _EXPERIMENT 
+_LISTEXPERIMENTSREPLY.fields_by_name['experiments'].message_type = _EXPERIMENT +_GETEXPERIMENTSTATUSREPLY.fields_by_name['status'].enum_type = _GETEXPERIMENTSTATUSREPLY_STATUS +_GETEXPERIMENTSTATUSREPLY_STATUS.containing_type = _GETEXPERIMENTSTATUSREPLY +_EXPERIMENT_PARAMSENTRY.fields_by_name['value'].message_type = _PARAMTYPE +_EXPERIMENT_PARAMSENTRY.containing_type = _EXPERIMENT +_EXPERIMENT_PYTHONPACKAGESENTRY.containing_type = _EXPERIMENT +_EXPERIMENT.fields_by_name['created'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_EXPERIMENT.fields_by_name['params'].message_type = _EXPERIMENT_PARAMSENTRY +_EXPERIMENT.fields_by_name['config'].message_type = _CONFIG +_EXPERIMENT.fields_by_name['pythonPackages'].message_type = _EXPERIMENT_PYTHONPACKAGESENTRY +_EXPERIMENT.fields_by_name['checkpoints'].message_type = _CHECKPOINT +_CHECKPOINT_METRICSENTRY.fields_by_name['value'].message_type = _PARAMTYPE +_CHECKPOINT_METRICSENTRY.containing_type = _CHECKPOINT +_CHECKPOINT.fields_by_name['created'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_CHECKPOINT.fields_by_name['metrics'].message_type = _CHECKPOINT_METRICSENTRY +_CHECKPOINT.fields_by_name['primaryMetric'].message_type = _PRIMARYMETRIC +_PRIMARYMETRIC.fields_by_name['goal'].enum_type = _PRIMARYMETRIC_GOAL +_PRIMARYMETRIC_GOAL.containing_type = _PRIMARYMETRIC +_PARAMTYPE.oneofs_by_name['value'].fields.append( + _PARAMTYPE.fields_by_name['boolValue']) +_PARAMTYPE.fields_by_name['boolValue'].containing_oneof = _PARAMTYPE.oneofs_by_name['value'] +_PARAMTYPE.oneofs_by_name['value'].fields.append( + _PARAMTYPE.fields_by_name['intValue']) +_PARAMTYPE.fields_by_name['intValue'].containing_oneof = _PARAMTYPE.oneofs_by_name['value'] +_PARAMTYPE.oneofs_by_name['value'].fields.append( + _PARAMTYPE.fields_by_name['floatValue']) +_PARAMTYPE.fields_by_name['floatValue'].containing_oneof = _PARAMTYPE.oneofs_by_name['value'] +_PARAMTYPE.oneofs_by_name['value'].fields.append( + 
_PARAMTYPE.fields_by_name['stringValue']) +_PARAMTYPE.fields_by_name['stringValue'].containing_oneof = _PARAMTYPE.oneofs_by_name['value'] +_PARAMTYPE.oneofs_by_name['value'].fields.append( + _PARAMTYPE.fields_by_name['objectValueJson']) +_PARAMTYPE.fields_by_name['objectValueJson'].containing_oneof = _PARAMTYPE.oneofs_by_name['value'] +DESCRIPTOR.message_types_by_name['CreateExperimentRequest'] = _CREATEEXPERIMENTREQUEST +DESCRIPTOR.message_types_by_name['CreateExperimentReply'] = _CREATEEXPERIMENTREPLY +DESCRIPTOR.message_types_by_name['CreateCheckpointRequest'] = _CREATECHECKPOINTREQUEST +DESCRIPTOR.message_types_by_name['CreateCheckpointReply'] = _CREATECHECKPOINTREPLY +DESCRIPTOR.message_types_by_name['SaveExperimentRequest'] = _SAVEEXPERIMENTREQUEST +DESCRIPTOR.message_types_by_name['SaveExperimentReply'] = _SAVEEXPERIMENTREPLY +DESCRIPTOR.message_types_by_name['StopExperimentRequest'] = _STOPEXPERIMENTREQUEST +DESCRIPTOR.message_types_by_name['StopExperimentReply'] = _STOPEXPERIMENTREPLY +DESCRIPTOR.message_types_by_name['GetExperimentRequest'] = _GETEXPERIMENTREQUEST +DESCRIPTOR.message_types_by_name['GetExperimentReply'] = _GETEXPERIMENTREPLY +DESCRIPTOR.message_types_by_name['ListExperimentsRequest'] = _LISTEXPERIMENTSREQUEST +DESCRIPTOR.message_types_by_name['ListExperimentsReply'] = _LISTEXPERIMENTSREPLY +DESCRIPTOR.message_types_by_name['DeleteExperimentRequest'] = _DELETEEXPERIMENTREQUEST +DESCRIPTOR.message_types_by_name['DeleteExperimentReply'] = _DELETEEXPERIMENTREPLY +DESCRIPTOR.message_types_by_name['CheckoutCheckpointRequest'] = _CHECKOUTCHECKPOINTREQUEST +DESCRIPTOR.message_types_by_name['CheckoutCheckpointReply'] = _CHECKOUTCHECKPOINTREPLY +DESCRIPTOR.message_types_by_name['GetExperimentStatusRequest'] = _GETEXPERIMENTSTATUSREQUEST +DESCRIPTOR.message_types_by_name['GetExperimentStatusReply'] = _GETEXPERIMENTSTATUSREPLY +DESCRIPTOR.message_types_by_name['Experiment'] = _EXPERIMENT +DESCRIPTOR.message_types_by_name['Config'] = _CONFIG 
+DESCRIPTOR.message_types_by_name['Checkpoint'] = _CHECKPOINT +DESCRIPTOR.message_types_by_name['PrimaryMetric'] = _PRIMARYMETRIC +DESCRIPTOR.message_types_by_name['ParamType'] = _PARAMTYPE +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +CreateExperimentRequest = _reflection.GeneratedProtocolMessageType('CreateExperimentRequest', (_message.Message,), { + 'DESCRIPTOR' : _CREATEEXPERIMENTREQUEST, + '__module__' : 'replicate_pb2' + # @@protoc_insertion_point(class_scope:service.CreateExperimentRequest) + }) +_sym_db.RegisterMessage(CreateExperimentRequest) + +CreateExperimentReply = _reflection.GeneratedProtocolMessageType('CreateExperimentReply', (_message.Message,), { + 'DESCRIPTOR' : _CREATEEXPERIMENTREPLY, + '__module__' : 'replicate_pb2' + # @@protoc_insertion_point(class_scope:service.CreateExperimentReply) + }) +_sym_db.RegisterMessage(CreateExperimentReply) + +CreateCheckpointRequest = _reflection.GeneratedProtocolMessageType('CreateCheckpointRequest', (_message.Message,), { + 'DESCRIPTOR' : _CREATECHECKPOINTREQUEST, + '__module__' : 'replicate_pb2' + # @@protoc_insertion_point(class_scope:service.CreateCheckpointRequest) + }) +_sym_db.RegisterMessage(CreateCheckpointRequest) + +CreateCheckpointReply = _reflection.GeneratedProtocolMessageType('CreateCheckpointReply', (_message.Message,), { + 'DESCRIPTOR' : _CREATECHECKPOINTREPLY, + '__module__' : 'replicate_pb2' + # @@protoc_insertion_point(class_scope:service.CreateCheckpointReply) + }) +_sym_db.RegisterMessage(CreateCheckpointReply) + +SaveExperimentRequest = _reflection.GeneratedProtocolMessageType('SaveExperimentRequest', (_message.Message,), { + 'DESCRIPTOR' : _SAVEEXPERIMENTREQUEST, + '__module__' : 'replicate_pb2' + # @@protoc_insertion_point(class_scope:service.SaveExperimentRequest) + }) +_sym_db.RegisterMessage(SaveExperimentRequest) + +SaveExperimentReply = _reflection.GeneratedProtocolMessageType('SaveExperimentReply', (_message.Message,), { + 'DESCRIPTOR' : _SAVEEXPERIMENTREPLY, + '__module__' : 
'replicate_pb2' + # @@protoc_insertion_point(class_scope:service.SaveExperimentReply) + }) +_sym_db.RegisterMessage(SaveExperimentReply) + +StopExperimentRequest = _reflection.GeneratedProtocolMessageType('StopExperimentRequest', (_message.Message,), { + 'DESCRIPTOR' : _STOPEXPERIMENTREQUEST, + '__module__' : 'replicate_pb2' + # @@protoc_insertion_point(class_scope:service.StopExperimentRequest) + }) +_sym_db.RegisterMessage(StopExperimentRequest) + +StopExperimentReply = _reflection.GeneratedProtocolMessageType('StopExperimentReply', (_message.Message,), { + 'DESCRIPTOR' : _STOPEXPERIMENTREPLY, + '__module__' : 'replicate_pb2' + # @@protoc_insertion_point(class_scope:service.StopExperimentReply) + }) +_sym_db.RegisterMessage(StopExperimentReply) + +GetExperimentRequest = _reflection.GeneratedProtocolMessageType('GetExperimentRequest', (_message.Message,), { + 'DESCRIPTOR' : _GETEXPERIMENTREQUEST, + '__module__' : 'replicate_pb2' + # @@protoc_insertion_point(class_scope:service.GetExperimentRequest) + }) +_sym_db.RegisterMessage(GetExperimentRequest) + +GetExperimentReply = _reflection.GeneratedProtocolMessageType('GetExperimentReply', (_message.Message,), { + 'DESCRIPTOR' : _GETEXPERIMENTREPLY, + '__module__' : 'replicate_pb2' + # @@protoc_insertion_point(class_scope:service.GetExperimentReply) + }) +_sym_db.RegisterMessage(GetExperimentReply) + +ListExperimentsRequest = _reflection.GeneratedProtocolMessageType('ListExperimentsRequest', (_message.Message,), { + 'DESCRIPTOR' : _LISTEXPERIMENTSREQUEST, + '__module__' : 'replicate_pb2' + # @@protoc_insertion_point(class_scope:service.ListExperimentsRequest) + }) +_sym_db.RegisterMessage(ListExperimentsRequest) + +ListExperimentsReply = _reflection.GeneratedProtocolMessageType('ListExperimentsReply', (_message.Message,), { + 'DESCRIPTOR' : _LISTEXPERIMENTSREPLY, + '__module__' : 'replicate_pb2' + # @@protoc_insertion_point(class_scope:service.ListExperimentsReply) + }) +_sym_db.RegisterMessage(ListExperimentsReply) + 
+DeleteExperimentRequest = _reflection.GeneratedProtocolMessageType('DeleteExperimentRequest', (_message.Message,), { + 'DESCRIPTOR' : _DELETEEXPERIMENTREQUEST, + '__module__' : 'replicate_pb2' + # @@protoc_insertion_point(class_scope:service.DeleteExperimentRequest) + }) +_sym_db.RegisterMessage(DeleteExperimentRequest) + +DeleteExperimentReply = _reflection.GeneratedProtocolMessageType('DeleteExperimentReply', (_message.Message,), { + 'DESCRIPTOR' : _DELETEEXPERIMENTREPLY, + '__module__' : 'replicate_pb2' + # @@protoc_insertion_point(class_scope:service.DeleteExperimentReply) + }) +_sym_db.RegisterMessage(DeleteExperimentReply) + +CheckoutCheckpointRequest = _reflection.GeneratedProtocolMessageType('CheckoutCheckpointRequest', (_message.Message,), { + 'DESCRIPTOR' : _CHECKOUTCHECKPOINTREQUEST, + '__module__' : 'replicate_pb2' + # @@protoc_insertion_point(class_scope:service.CheckoutCheckpointRequest) + }) +_sym_db.RegisterMessage(CheckoutCheckpointRequest) + +CheckoutCheckpointReply = _reflection.GeneratedProtocolMessageType('CheckoutCheckpointReply', (_message.Message,), { + 'DESCRIPTOR' : _CHECKOUTCHECKPOINTREPLY, + '__module__' : 'replicate_pb2' + # @@protoc_insertion_point(class_scope:service.CheckoutCheckpointReply) + }) +_sym_db.RegisterMessage(CheckoutCheckpointReply) + +GetExperimentStatusRequest = _reflection.GeneratedProtocolMessageType('GetExperimentStatusRequest', (_message.Message,), { + 'DESCRIPTOR' : _GETEXPERIMENTSTATUSREQUEST, + '__module__' : 'replicate_pb2' + # @@protoc_insertion_point(class_scope:service.GetExperimentStatusRequest) + }) +_sym_db.RegisterMessage(GetExperimentStatusRequest) + +GetExperimentStatusReply = _reflection.GeneratedProtocolMessageType('GetExperimentStatusReply', (_message.Message,), { + 'DESCRIPTOR' : _GETEXPERIMENTSTATUSREPLY, + '__module__' : 'replicate_pb2' + # @@protoc_insertion_point(class_scope:service.GetExperimentStatusReply) + }) +_sym_db.RegisterMessage(GetExperimentStatusReply) + +Experiment = 
_reflection.GeneratedProtocolMessageType('Experiment', (_message.Message,), { + + 'ParamsEntry' : _reflection.GeneratedProtocolMessageType('ParamsEntry', (_message.Message,), { + 'DESCRIPTOR' : _EXPERIMENT_PARAMSENTRY, + '__module__' : 'replicate_pb2' + # @@protoc_insertion_point(class_scope:service.Experiment.ParamsEntry) + }) + , + + 'PythonPackagesEntry' : _reflection.GeneratedProtocolMessageType('PythonPackagesEntry', (_message.Message,), { + 'DESCRIPTOR' : _EXPERIMENT_PYTHONPACKAGESENTRY, + '__module__' : 'replicate_pb2' + # @@protoc_insertion_point(class_scope:service.Experiment.PythonPackagesEntry) + }) + , + 'DESCRIPTOR' : _EXPERIMENT, + '__module__' : 'replicate_pb2' + # @@protoc_insertion_point(class_scope:service.Experiment) + }) +_sym_db.RegisterMessage(Experiment) +_sym_db.RegisterMessage(Experiment.ParamsEntry) +_sym_db.RegisterMessage(Experiment.PythonPackagesEntry) + +Config = _reflection.GeneratedProtocolMessageType('Config', (_message.Message,), { + 'DESCRIPTOR' : _CONFIG, + '__module__' : 'replicate_pb2' + # @@protoc_insertion_point(class_scope:service.Config) + }) +_sym_db.RegisterMessage(Config) + +Checkpoint = _reflection.GeneratedProtocolMessageType('Checkpoint', (_message.Message,), { + + 'MetricsEntry' : _reflection.GeneratedProtocolMessageType('MetricsEntry', (_message.Message,), { + 'DESCRIPTOR' : _CHECKPOINT_METRICSENTRY, + '__module__' : 'replicate_pb2' + # @@protoc_insertion_point(class_scope:service.Checkpoint.MetricsEntry) + }) + , + 'DESCRIPTOR' : _CHECKPOINT, + '__module__' : 'replicate_pb2' + # @@protoc_insertion_point(class_scope:service.Checkpoint) + }) +_sym_db.RegisterMessage(Checkpoint) +_sym_db.RegisterMessage(Checkpoint.MetricsEntry) + +PrimaryMetric = _reflection.GeneratedProtocolMessageType('PrimaryMetric', (_message.Message,), { + 'DESCRIPTOR' : _PRIMARYMETRIC, + '__module__' : 'replicate_pb2' + # @@protoc_insertion_point(class_scope:service.PrimaryMetric) + }) +_sym_db.RegisterMessage(PrimaryMetric) + +ParamType = 
_reflection.GeneratedProtocolMessageType('ParamType', (_message.Message,), { + 'DESCRIPTOR' : _PARAMTYPE, + '__module__' : 'replicate_pb2' + # @@protoc_insertion_point(class_scope:service.ParamType) + }) +_sym_db.RegisterMessage(ParamType) + + +DESCRIPTOR._options = None +_EXPERIMENT_PARAMSENTRY._options = None +_EXPERIMENT_PYTHONPACKAGESENTRY._options = None +_CHECKPOINT_METRICSENTRY._options = None + +_DAEMON = _descriptor.ServiceDescriptor( + name='Daemon', + full_name='service.Daemon', + file=DESCRIPTOR, + index=0, + serialized_options=None, + create_key=_descriptor._internal_create_key, + serialized_start=2230, + serialized_end=3021, + methods=[ + _descriptor.MethodDescriptor( + name='CreateExperiment', + full_name='service.Daemon.CreateExperiment', + index=0, + containing_service=None, + input_type=_CREATEEXPERIMENTREQUEST, + output_type=_CREATEEXPERIMENTREPLY, + serialized_options=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.MethodDescriptor( + name='CreateCheckpoint', + full_name='service.Daemon.CreateCheckpoint', + index=1, + containing_service=None, + input_type=_CREATECHECKPOINTREQUEST, + output_type=_CREATECHECKPOINTREPLY, + serialized_options=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.MethodDescriptor( + name='SaveExperiment', + full_name='service.Daemon.SaveExperiment', + index=2, + containing_service=None, + input_type=_SAVEEXPERIMENTREQUEST, + output_type=_SAVEEXPERIMENTREPLY, + serialized_options=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.MethodDescriptor( + name='StopExperiment', + full_name='service.Daemon.StopExperiment', + index=3, + containing_service=None, + input_type=_STOPEXPERIMENTREQUEST, + output_type=_STOPEXPERIMENTREPLY, + serialized_options=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.MethodDescriptor( + name='GetExperiment', + full_name='service.Daemon.GetExperiment', + index=4, + containing_service=None, + 
input_type=_GETEXPERIMENTREQUEST, + output_type=_GETEXPERIMENTREPLY, + serialized_options=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.MethodDescriptor( + name='ListExperiments', + full_name='service.Daemon.ListExperiments', + index=5, + containing_service=None, + input_type=_LISTEXPERIMENTSREQUEST, + output_type=_LISTEXPERIMENTSREPLY, + serialized_options=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.MethodDescriptor( + name='DeleteExperiment', + full_name='service.Daemon.DeleteExperiment', + index=6, + containing_service=None, + input_type=_DELETEEXPERIMENTREQUEST, + output_type=_DELETEEXPERIMENTREPLY, + serialized_options=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.MethodDescriptor( + name='CheckoutCheckpoint', + full_name='service.Daemon.CheckoutCheckpoint', + index=7, + containing_service=None, + input_type=_CHECKOUTCHECKPOINTREQUEST, + output_type=_CHECKOUTCHECKPOINTREPLY, + serialized_options=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.MethodDescriptor( + name='GetExperimentStatus', + full_name='service.Daemon.GetExperimentStatus', + index=8, + containing_service=None, + input_type=_GETEXPERIMENTSTATUSREQUEST, + output_type=_GETEXPERIMENTSTATUSREPLY, + serialized_options=None, + create_key=_descriptor._internal_create_key, + ), +]) +_sym_db.RegisterServiceDescriptor(_DAEMON) + +DESCRIPTOR.services_by_name['Daemon'] = _DAEMON + +# @@protoc_insertion_point(module_scope) diff --git a/python/replicate/servicepb/replicate_pb2.pyi b/python/replicate/servicepb/replicate_pb2.pyi new file mode 100644 index 00000000..4501a53f --- /dev/null +++ b/python/replicate/servicepb/replicate_pb2.pyi @@ -0,0 +1,449 @@ +# @generated by generate_proto_mypy_stubs.py. Do not edit! 
+import sys +from google.protobuf.descriptor import ( + Descriptor as google___protobuf___descriptor___Descriptor, + EnumDescriptor as google___protobuf___descriptor___EnumDescriptor, + FileDescriptor as google___protobuf___descriptor___FileDescriptor, +) + +from google.protobuf.internal.containers import ( + RepeatedCompositeFieldContainer as google___protobuf___internal___containers___RepeatedCompositeFieldContainer, +) + +from google.protobuf.internal.enum_type_wrapper import ( + _EnumTypeWrapper as google___protobuf___internal___enum_type_wrapper____EnumTypeWrapper, +) + +from google.protobuf.message import ( + Message as google___protobuf___message___Message, +) + +from google.protobuf.timestamp_pb2 import ( + Timestamp as google___protobuf___timestamp_pb2___Timestamp, +) + +from typing import ( + Iterable as typing___Iterable, + Mapping as typing___Mapping, + MutableMapping as typing___MutableMapping, + NewType as typing___NewType, + Optional as typing___Optional, + Text as typing___Text, + cast as typing___cast, +) + +from typing_extensions import ( + Literal as typing_extensions___Literal, +) + + +builtin___bool = bool +builtin___bytes = bytes +builtin___float = float +builtin___int = int + + +DESCRIPTOR: google___protobuf___descriptor___FileDescriptor = ... + +class CreateExperimentRequest(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + disableHeartbeat: builtin___bool = ... + quiet: builtin___bool = ... + + @property + def experiment(self) -> type___Experiment: ... + + def __init__(self, + *, + experiment : typing___Optional[type___Experiment] = None, + disableHeartbeat : typing___Optional[builtin___bool] = None, + quiet : typing___Optional[builtin___bool] = None, + ) -> None: ... + def HasField(self, field_name: typing_extensions___Literal[u"experiment",b"experiment"]) -> builtin___bool: ... 
+ def ClearField(self, field_name: typing_extensions___Literal[u"disableHeartbeat",b"disableHeartbeat",u"experiment",b"experiment",u"quiet",b"quiet"]) -> None: ... +type___CreateExperimentRequest = CreateExperimentRequest + +class CreateExperimentReply(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + + @property + def experiment(self) -> type___Experiment: ... + + def __init__(self, + *, + experiment : typing___Optional[type___Experiment] = None, + ) -> None: ... + def HasField(self, field_name: typing_extensions___Literal[u"experiment",b"experiment"]) -> builtin___bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"experiment",b"experiment"]) -> None: ... +type___CreateExperimentReply = CreateExperimentReply + +class CreateCheckpointRequest(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + quiet: builtin___bool = ... + + @property + def checkpoint(self) -> type___Checkpoint: ... + + def __init__(self, + *, + checkpoint : typing___Optional[type___Checkpoint] = None, + quiet : typing___Optional[builtin___bool] = None, + ) -> None: ... + def HasField(self, field_name: typing_extensions___Literal[u"checkpoint",b"checkpoint"]) -> builtin___bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"checkpoint",b"checkpoint",u"quiet",b"quiet"]) -> None: ... +type___CreateCheckpointRequest = CreateCheckpointRequest + +class CreateCheckpointReply(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + + @property + def checkpoint(self) -> type___Checkpoint: ... + + def __init__(self, + *, + checkpoint : typing___Optional[type___Checkpoint] = None, + ) -> None: ... + def HasField(self, field_name: typing_extensions___Literal[u"checkpoint",b"checkpoint"]) -> builtin___bool: ... 
+ def ClearField(self, field_name: typing_extensions___Literal[u"checkpoint",b"checkpoint"]) -> None: ... +type___CreateCheckpointReply = CreateCheckpointReply + +class SaveExperimentRequest(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + quiet: builtin___bool = ... + + @property + def experiment(self) -> type___Experiment: ... + + def __init__(self, + *, + experiment : typing___Optional[type___Experiment] = None, + quiet : typing___Optional[builtin___bool] = None, + ) -> None: ... + def HasField(self, field_name: typing_extensions___Literal[u"experiment",b"experiment"]) -> builtin___bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"experiment",b"experiment",u"quiet",b"quiet"]) -> None: ... +type___SaveExperimentRequest = SaveExperimentRequest + +class SaveExperimentReply(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + + @property + def experiment(self) -> type___Experiment: ... + + def __init__(self, + *, + experiment : typing___Optional[type___Experiment] = None, + ) -> None: ... + def HasField(self, field_name: typing_extensions___Literal[u"experiment",b"experiment"]) -> builtin___bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"experiment",b"experiment"]) -> None: ... +type___SaveExperimentReply = SaveExperimentReply + +class StopExperimentRequest(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + experimentID: typing___Text = ... + + def __init__(self, + *, + experimentID : typing___Optional[typing___Text] = None, + ) -> None: ... + def ClearField(self, field_name: typing_extensions___Literal[u"experimentID",b"experimentID"]) -> None: ... +type___StopExperimentRequest = StopExperimentRequest + +class StopExperimentReply(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... 
+ + def __init__(self, + ) -> None: ... +type___StopExperimentReply = StopExperimentReply + +class GetExperimentRequest(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + experimentIDPrefix: typing___Text = ... + + def __init__(self, + *, + experimentIDPrefix : typing___Optional[typing___Text] = None, + ) -> None: ... + def ClearField(self, field_name: typing_extensions___Literal[u"experimentIDPrefix",b"experimentIDPrefix"]) -> None: ... +type___GetExperimentRequest = GetExperimentRequest + +class GetExperimentReply(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + + @property + def experiment(self) -> type___Experiment: ... + + def __init__(self, + *, + experiment : typing___Optional[type___Experiment] = None, + ) -> None: ... + def HasField(self, field_name: typing_extensions___Literal[u"experiment",b"experiment"]) -> builtin___bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"experiment",b"experiment"]) -> None: ... +type___GetExperimentReply = GetExperimentReply + +class ListExperimentsRequest(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + + def __init__(self, + ) -> None: ... +type___ListExperimentsRequest = ListExperimentsRequest + +class ListExperimentsReply(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + + @property + def experiments(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[type___Experiment]: ... + + def __init__(self, + *, + experiments : typing___Optional[typing___Iterable[type___Experiment]] = None, + ) -> None: ... + def ClearField(self, field_name: typing_extensions___Literal[u"experiments",b"experiments"]) -> None: ... 
+type___ListExperimentsReply = ListExperimentsReply + +class DeleteExperimentRequest(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + experimentID: typing___Text = ... + + def __init__(self, + *, + experimentID : typing___Optional[typing___Text] = None, + ) -> None: ... + def ClearField(self, field_name: typing_extensions___Literal[u"experimentID",b"experimentID"]) -> None: ... +type___DeleteExperimentRequest = DeleteExperimentRequest + +class DeleteExperimentReply(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + + def __init__(self, + ) -> None: ... +type___DeleteExperimentReply = DeleteExperimentReply + +class CheckoutCheckpointRequest(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + checkpointIDPrefix: typing___Text = ... + outputDirectory: typing___Text = ... + quiet: builtin___bool = ... + + def __init__(self, + *, + checkpointIDPrefix : typing___Optional[typing___Text] = None, + outputDirectory : typing___Optional[typing___Text] = None, + quiet : typing___Optional[builtin___bool] = None, + ) -> None: ... + def ClearField(self, field_name: typing_extensions___Literal[u"checkpointIDPrefix",b"checkpointIDPrefix",u"outputDirectory",b"outputDirectory",u"quiet",b"quiet"]) -> None: ... +type___CheckoutCheckpointRequest = CheckoutCheckpointRequest + +class CheckoutCheckpointReply(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + + def __init__(self, + ) -> None: ... +type___CheckoutCheckpointReply = CheckoutCheckpointReply + +class GetExperimentStatusRequest(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + experimentID: typing___Text = ... + + def __init__(self, + *, + experimentID : typing___Optional[typing___Text] = None, + ) -> None: ... 
+ def ClearField(self, field_name: typing_extensions___Literal[u"experimentID",b"experimentID"]) -> None: ... +type___GetExperimentStatusRequest = GetExperimentStatusRequest + +class GetExperimentStatusReply(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + StatusValue = typing___NewType('StatusValue', builtin___int) + type___StatusValue = StatusValue + Status: _Status + class _Status(google___protobuf___internal___enum_type_wrapper____EnumTypeWrapper[GetExperimentStatusReply.StatusValue]): + DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ... + RUNNING = typing___cast(GetExperimentStatusReply.StatusValue, 0) + STOPPED = typing___cast(GetExperimentStatusReply.StatusValue, 1) + RUNNING = typing___cast(GetExperimentStatusReply.StatusValue, 0) + STOPPED = typing___cast(GetExperimentStatusReply.StatusValue, 1) + type___Status = Status + + status: type___GetExperimentStatusReply.StatusValue = ... + + def __init__(self, + *, + status : typing___Optional[type___GetExperimentStatusReply.StatusValue] = None, + ) -> None: ... + def ClearField(self, field_name: typing_extensions___Literal[u"status",b"status"]) -> None: ... +type___GetExperimentStatusReply = GetExperimentStatusReply + +class Experiment(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + class ParamsEntry(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + key: typing___Text = ... + + @property + def value(self) -> type___ParamType: ... + + def __init__(self, + *, + key : typing___Optional[typing___Text] = None, + value : typing___Optional[type___ParamType] = None, + ) -> None: ... + def HasField(self, field_name: typing_extensions___Literal[u"value",b"value"]) -> builtin___bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"key",b"key",u"value",b"value"]) -> None: ... 
+ type___ParamsEntry = ParamsEntry + + class PythonPackagesEntry(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + key: typing___Text = ... + value: typing___Text = ... + + def __init__(self, + *, + key : typing___Optional[typing___Text] = None, + value : typing___Optional[typing___Text] = None, + ) -> None: ... + def ClearField(self, field_name: typing_extensions___Literal[u"key",b"key",u"value",b"value"]) -> None: ... + type___PythonPackagesEntry = PythonPackagesEntry + + id: typing___Text = ... + host: typing___Text = ... + user: typing___Text = ... + command: typing___Text = ... + path: typing___Text = ... + pythonVersion: typing___Text = ... + replicateVersion: typing___Text = ... + + @property + def created(self) -> google___protobuf___timestamp_pb2___Timestamp: ... + + @property + def params(self) -> typing___MutableMapping[typing___Text, type___ParamType]: ... + + @property + def config(self) -> type___Config: ... + + @property + def pythonPackages(self) -> typing___MutableMapping[typing___Text, typing___Text]: ... + + @property + def checkpoints(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[type___Checkpoint]: ... 
+ + def __init__(self, + *, + id : typing___Optional[typing___Text] = None, + created : typing___Optional[google___protobuf___timestamp_pb2___Timestamp] = None, + params : typing___Optional[typing___Mapping[typing___Text, type___ParamType]] = None, + host : typing___Optional[typing___Text] = None, + user : typing___Optional[typing___Text] = None, + config : typing___Optional[type___Config] = None, + command : typing___Optional[typing___Text] = None, + path : typing___Optional[typing___Text] = None, + pythonPackages : typing___Optional[typing___Mapping[typing___Text, typing___Text]] = None, + pythonVersion : typing___Optional[typing___Text] = None, + checkpoints : typing___Optional[typing___Iterable[type___Checkpoint]] = None, + replicateVersion : typing___Optional[typing___Text] = None, + ) -> None: ... + def HasField(self, field_name: typing_extensions___Literal[u"config",b"config",u"created",b"created"]) -> builtin___bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"checkpoints",b"checkpoints",u"command",b"command",u"config",b"config",u"created",b"created",u"host",b"host",u"id",b"id",u"params",b"params",u"path",b"path",u"pythonPackages",b"pythonPackages",u"pythonVersion",b"pythonVersion",u"replicateVersion",b"replicateVersion",u"user",b"user"]) -> None: ... +type___Experiment = Experiment + +class Config(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + repository: typing___Text = ... + storage: typing___Text = ... + + def __init__(self, + *, + repository : typing___Optional[typing___Text] = None, + storage : typing___Optional[typing___Text] = None, + ) -> None: ... + def ClearField(self, field_name: typing_extensions___Literal[u"repository",b"repository",u"storage",b"storage"]) -> None: ... +type___Config = Config + +class Checkpoint(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... 
+ class MetricsEntry(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + key: typing___Text = ... + + @property + def value(self) -> type___ParamType: ... + + def __init__(self, + *, + key : typing___Optional[typing___Text] = None, + value : typing___Optional[type___ParamType] = None, + ) -> None: ... + def HasField(self, field_name: typing_extensions___Literal[u"value",b"value"]) -> builtin___bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"key",b"key",u"value",b"value"]) -> None: ... + type___MetricsEntry = MetricsEntry + + id: typing___Text = ... + step: builtin___int = ... + path: typing___Text = ... + + @property + def created(self) -> google___protobuf___timestamp_pb2___Timestamp: ... + + @property + def metrics(self) -> typing___MutableMapping[typing___Text, type___ParamType]: ... + + @property + def primaryMetric(self) -> type___PrimaryMetric: ... + + def __init__(self, + *, + id : typing___Optional[typing___Text] = None, + created : typing___Optional[google___protobuf___timestamp_pb2___Timestamp] = None, + metrics : typing___Optional[typing___Mapping[typing___Text, type___ParamType]] = None, + step : typing___Optional[builtin___int] = None, + path : typing___Optional[typing___Text] = None, + primaryMetric : typing___Optional[type___PrimaryMetric] = None, + ) -> None: ... + def HasField(self, field_name: typing_extensions___Literal[u"created",b"created",u"primaryMetric",b"primaryMetric"]) -> builtin___bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"created",b"created",u"id",b"id",u"metrics",b"metrics",u"path",b"path",u"primaryMetric",b"primaryMetric",u"step",b"step"]) -> None: ... +type___Checkpoint = Checkpoint + +class PrimaryMetric(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... 
+ GoalValue = typing___NewType('GoalValue', builtin___int) + type___GoalValue = GoalValue + Goal: _Goal + class _Goal(google___protobuf___internal___enum_type_wrapper____EnumTypeWrapper[PrimaryMetric.GoalValue]): + DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ... + MAXIMIZE = typing___cast(PrimaryMetric.GoalValue, 0) + MINIMIZE = typing___cast(PrimaryMetric.GoalValue, 1) + MAXIMIZE = typing___cast(PrimaryMetric.GoalValue, 0) + MINIMIZE = typing___cast(PrimaryMetric.GoalValue, 1) + type___Goal = Goal + + name: typing___Text = ... + goal: type___PrimaryMetric.GoalValue = ... + + def __init__(self, + *, + name : typing___Optional[typing___Text] = None, + goal : typing___Optional[type___PrimaryMetric.GoalValue] = None, + ) -> None: ... + def ClearField(self, field_name: typing_extensions___Literal[u"goal",b"goal",u"name",b"name"]) -> None: ... +type___PrimaryMetric = PrimaryMetric + +class ParamType(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + boolValue: builtin___bool = ... + intValue: builtin___int = ... + floatValue: builtin___float = ... + stringValue: typing___Text = ... + objectValueJson: typing___Text = ... + + def __init__(self, + *, + boolValue : typing___Optional[builtin___bool] = None, + intValue : typing___Optional[builtin___int] = None, + floatValue : typing___Optional[builtin___float] = None, + stringValue : typing___Optional[typing___Text] = None, + objectValueJson : typing___Optional[typing___Text] = None, + ) -> None: ... + def HasField(self, field_name: typing_extensions___Literal[u"boolValue",b"boolValue",u"floatValue",b"floatValue",u"intValue",b"intValue",u"objectValueJson",b"objectValueJson",u"stringValue",b"stringValue",u"value",b"value"]) -> builtin___bool: ... 
+ def ClearField(self, field_name: typing_extensions___Literal[u"boolValue",b"boolValue",u"floatValue",b"floatValue",u"intValue",b"intValue",u"objectValueJson",b"objectValueJson",u"stringValue",b"stringValue",u"value",b"value"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions___Literal[u"value",b"value"]) -> typing_extensions___Literal["boolValue","intValue","floatValue","stringValue","objectValueJson"]: ... +type___ParamType = ParamType diff --git a/python/replicate/servicepb/replicate_pb2_grpc.py b/python/replicate/servicepb/replicate_pb2_grpc.py new file mode 100644 index 00000000..870804a4 --- /dev/null +++ b/python/replicate/servicepb/replicate_pb2_grpc.py @@ -0,0 +1,330 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +from . import replicate_pb2 as replicate__pb2 + + +class DaemonStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.CreateExperiment = channel.unary_unary( + '/service.Daemon/CreateExperiment', + request_serializer=replicate__pb2.CreateExperimentRequest.SerializeToString, + response_deserializer=replicate__pb2.CreateExperimentReply.FromString, + ) + self.CreateCheckpoint = channel.unary_unary( + '/service.Daemon/CreateCheckpoint', + request_serializer=replicate__pb2.CreateCheckpointRequest.SerializeToString, + response_deserializer=replicate__pb2.CreateCheckpointReply.FromString, + ) + self.SaveExperiment = channel.unary_unary( + '/service.Daemon/SaveExperiment', + request_serializer=replicate__pb2.SaveExperimentRequest.SerializeToString, + response_deserializer=replicate__pb2.SaveExperimentReply.FromString, + ) + self.StopExperiment = channel.unary_unary( + '/service.Daemon/StopExperiment', + request_serializer=replicate__pb2.StopExperimentRequest.SerializeToString, + response_deserializer=replicate__pb2.StopExperimentReply.FromString, + ) + self.GetExperiment = channel.unary_unary( + '/service.Daemon/GetExperiment', + request_serializer=replicate__pb2.GetExperimentRequest.SerializeToString, + response_deserializer=replicate__pb2.GetExperimentReply.FromString, + ) + self.ListExperiments = channel.unary_unary( + '/service.Daemon/ListExperiments', + request_serializer=replicate__pb2.ListExperimentsRequest.SerializeToString, + response_deserializer=replicate__pb2.ListExperimentsReply.FromString, + ) + self.DeleteExperiment = channel.unary_unary( + '/service.Daemon/DeleteExperiment', + request_serializer=replicate__pb2.DeleteExperimentRequest.SerializeToString, + response_deserializer=replicate__pb2.DeleteExperimentReply.FromString, + ) + self.CheckoutCheckpoint = channel.unary_unary( + '/service.Daemon/CheckoutCheckpoint', + request_serializer=replicate__pb2.CheckoutCheckpointRequest.SerializeToString, + response_deserializer=replicate__pb2.CheckoutCheckpointReply.FromString, + ) + self.GetExperimentStatus = channel.unary_unary( + 
'/service.Daemon/GetExperimentStatus', + request_serializer=replicate__pb2.GetExperimentStatusRequest.SerializeToString, + response_deserializer=replicate__pb2.GetExperimentStatusReply.FromString, + ) + + +class DaemonServicer(object): + """Missing associated documentation comment in .proto file.""" + + def CreateExperiment(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateCheckpoint(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def SaveExperiment(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def StopExperiment(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetExperiment(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListExperiments(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteExperiment(self, request, context): + """Missing associated documentation 
comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CheckoutCheckpoint(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetExperimentStatus(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_DaemonServicer_to_server(servicer, server): + rpc_method_handlers = { + 'CreateExperiment': grpc.unary_unary_rpc_method_handler( + servicer.CreateExperiment, + request_deserializer=replicate__pb2.CreateExperimentRequest.FromString, + response_serializer=replicate__pb2.CreateExperimentReply.SerializeToString, + ), + 'CreateCheckpoint': grpc.unary_unary_rpc_method_handler( + servicer.CreateCheckpoint, + request_deserializer=replicate__pb2.CreateCheckpointRequest.FromString, + response_serializer=replicate__pb2.CreateCheckpointReply.SerializeToString, + ), + 'SaveExperiment': grpc.unary_unary_rpc_method_handler( + servicer.SaveExperiment, + request_deserializer=replicate__pb2.SaveExperimentRequest.FromString, + response_serializer=replicate__pb2.SaveExperimentReply.SerializeToString, + ), + 'StopExperiment': grpc.unary_unary_rpc_method_handler( + servicer.StopExperiment, + request_deserializer=replicate__pb2.StopExperimentRequest.FromString, + response_serializer=replicate__pb2.StopExperimentReply.SerializeToString, + ), + 'GetExperiment': grpc.unary_unary_rpc_method_handler( + servicer.GetExperiment, + request_deserializer=replicate__pb2.GetExperimentRequest.FromString, + response_serializer=replicate__pb2.GetExperimentReply.SerializeToString, 
+ ), + 'ListExperiments': grpc.unary_unary_rpc_method_handler( + servicer.ListExperiments, + request_deserializer=replicate__pb2.ListExperimentsRequest.FromString, + response_serializer=replicate__pb2.ListExperimentsReply.SerializeToString, + ), + 'DeleteExperiment': grpc.unary_unary_rpc_method_handler( + servicer.DeleteExperiment, + request_deserializer=replicate__pb2.DeleteExperimentRequest.FromString, + response_serializer=replicate__pb2.DeleteExperimentReply.SerializeToString, + ), + 'CheckoutCheckpoint': grpc.unary_unary_rpc_method_handler( + servicer.CheckoutCheckpoint, + request_deserializer=replicate__pb2.CheckoutCheckpointRequest.FromString, + response_serializer=replicate__pb2.CheckoutCheckpointReply.SerializeToString, + ), + 'GetExperimentStatus': grpc.unary_unary_rpc_method_handler( + servicer.GetExperimentStatus, + request_deserializer=replicate__pb2.GetExperimentStatusRequest.FromString, + response_serializer=replicate__pb2.GetExperimentStatusReply.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'service.Daemon', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + # This class is part of an EXPERIMENTAL API. 
+class Daemon(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def CreateExperiment(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/service.Daemon/CreateExperiment', + replicate__pb2.CreateExperimentRequest.SerializeToString, + replicate__pb2.CreateExperimentReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def CreateCheckpoint(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/service.Daemon/CreateCheckpoint', + replicate__pb2.CreateCheckpointRequest.SerializeToString, + replicate__pb2.CreateCheckpointReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def SaveExperiment(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/service.Daemon/SaveExperiment', + replicate__pb2.SaveExperimentRequest.SerializeToString, + replicate__pb2.SaveExperimentReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def StopExperiment(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/service.Daemon/StopExperiment', + 
replicate__pb2.StopExperimentRequest.SerializeToString, + replicate__pb2.StopExperimentReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetExperiment(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/service.Daemon/GetExperiment', + replicate__pb2.GetExperimentRequest.SerializeToString, + replicate__pb2.GetExperimentReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ListExperiments(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/service.Daemon/ListExperiments', + replicate__pb2.ListExperimentsRequest.SerializeToString, + replicate__pb2.ListExperimentsReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def DeleteExperiment(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/service.Daemon/DeleteExperiment', + replicate__pb2.DeleteExperimentRequest.SerializeToString, + replicate__pb2.DeleteExperimentReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def CheckoutCheckpoint(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + 
timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/service.Daemon/CheckoutCheckpoint', + replicate__pb2.CheckoutCheckpointRequest.SerializeToString, + replicate__pb2.CheckoutCheckpointReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetExperimentStatus(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/service.Daemon/GetExperimentStatus', + replicate__pb2.GetExperimentStatusRequest.SerializeToString, + replicate__pb2.GetExperimentStatusReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/python/replicate/shared.py b/python/replicate/shared.py deleted file mode 100644 index 0e747f1f..00000000 --- a/python/replicate/shared.py +++ /dev/null @@ -1,92 +0,0 @@ -""" -The client for a simple RPC system to call Go. - -Each request is a subprocess, it passes the request via stdin and receives the response via stdout. -It's like CGI RPC! - -The server is in go/shared/main.go -""" - -import base64 -import json -import os -import subprocess -import sys - - -SHARED_BINARY = os.path.join(os.path.dirname(__file__), "bin/replicate-shared") - - -class InternalSharedError(Exception): - pass - - -class SharedError(Exception): - """ - An error from Go. - - Go's jsonrpc implementation doesn't let us pass error codes, so we use a predictable - prefix to indicate error type. - - You should catch this exception and check the type to raise a Python error. 
- """ - - def __init__(self, message): - super(SharedError, self).__init__(message) - self.type = None - self.message = message - parts = str(self.message).split(":: ", 1) - if len(parts) > 1: - self.type = parts[0] - self.message = parts[1] - - -# Here follows some bodges to let us pass binary data over json-rpc. -# Python -> Go is easy -- Go automatically decodes base64 into any []byte field. -# Go -> Python, any key called 'Data' we decode as base64. -# -# In an ideal world, we'd use a decent serialization format like protobuf or msgpack, -# but json-rpc is built into the Go stdlib and is trivial to use from Python. Either -# protobuf or msgpack would require much more implementation on the Python side, and -# we don't care about speed or size. - - -class SharedJSONEncoder(json.JSONEncoder): - def default(self, o): - if isinstance(o, bytes): - return base64.b64encode(o).decode("ascii") - return super(SharedJSONEncoder, self).default(o) - - -class SharedJSONDecoder(json.JSONDecoder): - """ - A JSON decoder that decodes any key called 'data' as base64. 
- """ - - def __init__(self, *args, **kwargs): - json.JSONDecoder.__init__( - self, object_hook=self.object_hook_override, *args, **kwargs - ) - - def object_hook_override(self, obj): - if "Data" in obj: - obj["Data"] = base64.b64decode(obj["Data"]) - return obj - - -def call(method, **kwargs): - request = {"version": "1.1", "method": method, "params": [kwargs]} - result = subprocess.run( - SHARED_BINARY, - input=json.dumps(request, cls=SharedJSONEncoder).encode("utf-8"), - stdout=subprocess.PIPE, - check=True, - ) - if result.stderr: - print(result.stderr, file=sys.stderr) - if not result.stdout: - raise InternalSharedError("no response from shared binary") - response = json.loads(result.stdout, cls=SharedJSONDecoder) - if response.get("error"): - raise SharedError(response["error"]) - return response["result"] diff --git a/python/tests/repository/test_disk_repository.py b/python/tests/repository/test_disk_repository.py deleted file mode 100644 index 1b64cad2..00000000 --- a/python/tests/repository/test_disk_repository.py +++ /dev/null @@ -1,84 +0,0 @@ -import os -import pathlib -import pytest # type: ignore -import tarfile -import tempfile - -from replicate.exceptions import DoesNotExistError -from replicate.repository.disk_repository import DiskRepository - - -def test_put_get(): - with tempfile.TemporaryDirectory() as tmpdir: - repository = DiskRepository(root=tmpdir) - repository.put("some/file", "nice") - assert repository.get("some/file") == b"nice" - with pytest.raises(DoesNotExistError): - repository.get("not/here") - - -def test_list(): - with tempfile.TemporaryDirectory() as tmpdir: - repository = DiskRepository(root=tmpdir) - repository.put("foo", "nice") - repository.put("some/bar", "nice") - assert repository.list("") == ["foo"] - assert repository.list("some") == ["some/bar"] - - -def test_delete(): - with tempfile.TemporaryDirectory() as tmpdir: - repository = DiskRepository(root=tmpdir) - repository.put("some/file", "nice") - assert 
repository.get("some/file") == b"nice" - repository.delete("some/file") - with pytest.raises(DoesNotExistError): - repository.get("some/file") - - -def test_put_path(): - with tempfile.TemporaryDirectory() as src: - src_path = pathlib.Path(src) - for path in ["foo.txt", "bar/baz.txt", "qux.txt"]: - abs_path = src_path / path - abs_path.parent.mkdir(parents=True, exist_ok=True) - with open(abs_path, "w") as f: - f.write("hello " + path) - - with tempfile.TemporaryDirectory() as root: - root_path = pathlib.Path(root) - repository = DiskRepository(root=root) - repository.put_path(src, "somedir") - assert open(root_path / "somedir/foo.txt").read() == "hello foo.txt" - assert open(root_path / "somedir/qux.txt").read() == "hello qux.txt" - assert open(root_path / "somedir/bar/baz.txt").read() == "hello bar/baz.txt" - - # single files - repository.put_path(os.path.join(src, "foo.txt"), "singlefile/foo.txt") - assert open(root_path / "singlefile/foo.txt").read() == "hello foo.txt" - - -def test_get_put_path_tar(): - with tempfile.TemporaryDirectory() as src: - src_path = pathlib.Path(src) - for path in ["foo.txt", "bar/baz.txt", "qux.txt"]: - abs_path = src_path / path - abs_path.parent.mkdir(parents=True, exist_ok=True) - with open(abs_path, "w") as f: - f.write("hello " + path) - - with tempfile.TemporaryDirectory() as root: - root_path = pathlib.Path(root) - repository = DiskRepository(root=root) - repository.put_path_tar(src, "dest.tar.gz", "") - - with tempfile.TemporaryDirectory() as out: - out = pathlib.Path(out) - with tarfile.open(root_path / "dest.tar.gz") as tar: - tar.extractall(out) - assert open(out / "dest/foo.txt").read() == "hello foo.txt" - - with tempfile.TemporaryDirectory() as out: - repository.get_path_tar("dest.tar.gz", out) - out = pathlib.Path(out) - assert open(out / "foo.txt").read() == "hello foo.txt" diff --git a/python/tests/repository/test_gcs_repository.py b/python/tests/repository/test_gcs_repository.py deleted file mode 100644 index 
4169c315..00000000 --- a/python/tests/repository/test_gcs_repository.py +++ /dev/null @@ -1,203 +0,0 @@ -import os -import random -import string -import tempfile -from pathlib import Path -import pytest # type: ignore -from google.cloud import storage -from google.api_core.exceptions import NotFound - -from replicate.exceptions import DoesNotExistError -from replicate.repository.gcs_repository import GCSRepository -from replicate.hash import random_hash - - -# Disable this test with -m "not external" -pytestmark = pytest.mark.external - -# We only create one bucket for these tests, because Google Cloud rate limits creating buckets -# https://cloud.google.com/storage/quotas -# This means these tests can't be run in parallel -@pytest.fixture(scope="session") -def temp_bucket_create(): - bucket_name = "replicate-test-" + random_hash(20) - - client = storage.Client() - bucket = client.create_bucket(bucket_name) - try: - bucket.reload() - assert bucket.exists() - yield bucket - finally: - bucket.delete(force=True) - - -@pytest.fixture(scope="function") -def temp_bucket(temp_bucket_create): - bucket = temp_bucket_create - # Clear bucket before each test - blobs = bucket.list_blobs() - for blob in blobs: - blob.delete() - yield bucket - - -def test_put_get(temp_bucket): - repository = GCSRepository(bucket=temp_bucket.name, root="") - repository.put("foo/bar.txt", "nice") - assert temp_bucket.blob("foo/bar.txt").download_as_bytes() == b"nice" - assert repository.get("foo/bar.txt") == b"nice" - - -def test_put_get_with_root(temp_bucket): - repository = GCSRepository(bucket=temp_bucket.name, root="someroot") - repository.put("foo/bar.txt", "nice") - assert temp_bucket.blob("someroot/foo/bar.txt").download_as_bytes() == b"nice" - assert repository.get("foo/bar.txt") == b"nice" - - -def test_get_not_exists(temp_bucket): - repository = GCSRepository(bucket=temp_bucket.name, root="") - with pytest.raises(DoesNotExistError): - assert repository.get("foo/bar.txt") - - -def 
test_list(temp_bucket): - repository = GCSRepository(bucket=temp_bucket.name, root="") - repository.put("foo", "nice") - repository.put("some/bar", "nice") - assert repository.list("") == ["foo"] - assert repository.list("some") == ["some/bar"] - - -def test_put_path(temp_bucket, tmpdir): - repository = GCSRepository(bucket=temp_bucket.name, root="") - - for path in ["foo.txt", "bar/baz.txt", "qux.txt"]: - abs_path = os.path.join(tmpdir, path) - os.makedirs(os.path.dirname(abs_path), exist_ok=True) - with open(abs_path, "w") as f: - f.write("hello " + path) - - repository.put_path(tmpdir, "folder") - assert temp_bucket.blob("folder/foo.txt").download_as_bytes() == b"hello foo.txt" - assert temp_bucket.blob("folder/qux.txt").download_as_bytes() == b"hello qux.txt" - assert ( - temp_bucket.blob("folder/bar/baz.txt").download_as_bytes() - == b"hello bar/baz.txt" - ) - - # single files - repository.put_path(os.path.join(tmpdir, "foo.txt"), "singlefile/foo.txt") - assert ( - temp_bucket.blob("singlefile/foo.txt").download_as_bytes() == b"hello foo.txt" - ) - - -def test_put_path_with_root(temp_bucket, tmpdir): - repository = GCSRepository(bucket=temp_bucket.name, root="someroot") - - for path in ["foo.txt", "bar/baz.txt", "qux.txt"]: - abs_path = os.path.join(tmpdir, path) - os.makedirs(os.path.dirname(abs_path), exist_ok=True) - with open(abs_path, "w") as f: - f.write("hello " + path) - - repository.put_path(tmpdir, "folder") - assert ( - temp_bucket.blob("someroot/folder/foo.txt").download_as_bytes() - == b"hello foo.txt" - ) - assert ( - temp_bucket.blob("someroot/folder/qux.txt").download_as_bytes() - == b"hello qux.txt" - ) - assert ( - temp_bucket.blob("someroot/folder/bar/baz.txt").download_as_bytes() - == b"hello bar/baz.txt" - ) - - # single files - repository.put_path(os.path.join(tmpdir, "foo.txt"), "singlefile/foo.txt") - assert ( - temp_bucket.blob("someroot/singlefile/foo.txt").download_as_bytes() - == b"hello foo.txt" - ) - - -def 
test_replicateignore(temp_bucket, tmpdir): - repository = GCSRepository(bucket=temp_bucket.name, root="") - - for path in [ - "foo.txt", - "bar/baz.txt", - "bar/quux.xyz", - "bar/new-qux.txt", - "qux.xyz", - ]: - abs_path = os.path.join(tmpdir, path) - os.makedirs(os.path.dirname(abs_path), exist_ok=True) - with open(abs_path, "w") as f: - f.write("hello " + path) - - with open(os.path.join(tmpdir, ".replicateignore"), "w") as f: - f.write( - """ -# this is a comment -baz.txt -*.xyz -""" - ) - - repository.put_path(tmpdir, "folder") - assert temp_bucket.blob("folder/foo.txt").download_as_bytes() == b"hello foo.txt" - assert ( - temp_bucket.blob("folder/bar/new-qux.txt").download_as_bytes() - == b"hello bar/new-qux.txt" - ) - with pytest.raises(NotFound): - temp_bucket.blob("folder/bar/baz.txt").download_as_bytes() - with pytest.raises(NotFound): - temp_bucket.blob("folder/qux.xyz").download_as_bytes() - with pytest.raises(NotFound): - temp_bucket.blob("folder/bar/quux.xyz").download_as_bytes() - - -def test_delete(temp_bucket, tmpdir): - repository = GCSRepository(bucket=temp_bucket.name, root="") - - repository.put("some/file", "nice") - assert repository.get("some/file") == b"nice" - - repository.delete("some/file") - with pytest.raises(DoesNotExistError): - repository.get("some/file") - - -def test_delete_with_root(temp_bucket, tmpdir): - repository = GCSRepository(bucket=temp_bucket.name, root="my-root") - - repository.put("some/file", "nice") - assert repository.get("some/file") == b"nice" - - repository.delete("some/file") - with pytest.raises(DoesNotExistError): - repository.get("some/file") - - -def test_get_put_path_tar(temp_bucket): - with tempfile.TemporaryDirectory() as src: - src_path = Path(src) - for path in ["foo.txt", "bar/baz.txt", "qux.txt"]: - abs_path = src_path / path - abs_path.parent.mkdir(parents=True, exist_ok=True) - with open(abs_path, "w") as f: - f.write("hello " + path) - - repository = GCSRepository(bucket=temp_bucket.name, root="") 
- repository.put_path_tar(src, "dest.tar.gz", "") - - with tempfile.TemporaryDirectory() as out: - repository.get_path_tar("dest.tar.gz", out) - out = Path(out) - assert open(out / "foo.txt").read() == "hello foo.txt" diff --git a/python/tests/repository/test_repository.py b/python/tests/repository/test_repository.py deleted file mode 100644 index 9d293ccb..00000000 --- a/python/tests/repository/test_repository.py +++ /dev/null @@ -1,54 +0,0 @@ -import pytest # type: ignore - -from replicate.repository import repository_for_url -from replicate.repository.disk_repository import DiskRepository -from replicate.repository.s3_repository import S3Repository -from replicate.repository.gcs_repository import GCSRepository -from replicate.exceptions import UnknownRepositoryScheme - - -# parallel of go/pkg/repository/repository_test.go - - -def test_disk_repository(): - repository = repository_for_url("file:///foo/bar") - assert isinstance(repository, DiskRepository) - assert repository.root == "/foo/bar" - - repository = repository_for_url("file://foo/bar") - assert isinstance(repository, DiskRepository) - assert repository.root == "foo/bar" - - -def test_s3_repository(): - repository = repository_for_url("s3://my-bucket") - assert isinstance(repository, S3Repository) - assert repository.bucket_name == "my-bucket" - assert repository.root == "" - - repository = repository_for_url("s3://my-bucket/foo") - assert isinstance(repository, S3Repository) - assert repository.bucket_name == "my-bucket" - assert repository.root == "foo" - - -def test_gcs_repository(): - repository = repository_for_url("gs://my-bucket") - assert isinstance(repository, GCSRepository) - assert repository.bucket_name == "my-bucket" - assert repository.root == "" - - repository = repository_for_url("gs://my-bucket/foo") - assert isinstance(repository, GCSRepository) - assert repository.bucket_name == "my-bucket" - assert repository.root == "foo" - - -def test_unknown_repository(): - with 
pytest.raises(UnknownRepositoryScheme): - repository_for_url("foo://my-bucket") - - -def test_missing_scheme(): - with pytest.raises(UnknownRepositoryScheme): - repository_for_url("/foo/bar") diff --git a/python/tests/repository/test_s3_repository.py b/python/tests/repository/test_s3_repository.py deleted file mode 100644 index df99a1ad..00000000 --- a/python/tests/repository/test_s3_repository.py +++ /dev/null @@ -1,134 +0,0 @@ -import json -import os -import tempfile -from pathlib import Path -import pytest # type: ignore -import boto3 # type: ignore - -import replicate -from replicate.hash import random_hash -from replicate.repository.s3_repository import S3Repository -from replicate.exceptions import DoesNotExistError - -# Disable this test with -m "not external" -pytestmark = pytest.mark.external - - -@pytest.fixture(scope="session") -def temp_bucket_create(): - # We intentionally don't create the bucket to test Replicate's ability to create buckets - bucket_name = "replicate-test-unit-" + random_hash()[:20] - yield bucket_name - - # Delete bucket once at end of session - s3 = boto3.resource("s3") - bucket = s3.Bucket(bucket_name) - bucket.objects.all().delete() - bucket.delete() - - -@pytest.fixture(scope="function") -def temp_bucket(temp_bucket_create): - bucket_name = temp_bucket_create - - yield bucket_name - - # Clear all objects after each test - s3 = boto3.resource("s3") - bucket = s3.Bucket(bucket_name) - bucket.objects.all().delete() - - -def test_s3_experiment(temp_bucket, tmpdir): - replicate_yaml_contents = "repository: s3://{bucket}".format(bucket=temp_bucket) - - with open(os.path.join(tmpdir, "replicate.yaml"), "w") as f: - f.write(replicate_yaml_contents) - - current_workdir = os.getcwd() - try: - os.chdir(tmpdir) - experiment = replicate.init( - path=".", params={"foo": "bar"}, disable_heartbeat=True - ) - checkpoint = experiment.checkpoint( - path=".", step=10, metrics={"loss": 1.1, "baz": "qux"} - ) - - meta = s3_read_json( - temp_bucket, - 
os.path.join("metadata", "experiments", experiment.id + ".json"), - ) - assert meta["id"] == experiment.id - assert meta["created"] == experiment.created.isoformat() + "Z" - assert meta["params"] == {"foo": "bar"} - assert meta["checkpoints"] == [ - { - "id": checkpoint.id, - "created": checkpoint.created.isoformat() + "Z", - "step": 10, - "metrics": {"loss": 1.1, "baz": "qux"}, - "path": ".", - "primary_metric": None, - } - ] - - finally: - os.chdir(current_workdir) - - -def test_list(temp_bucket): - repository = S3Repository(bucket=temp_bucket, root="") - repository.put("foo", "nice") - repository.put("some/bar", "nice") - assert repository.list("") == ["foo"] - assert repository.list("some") == ["some/bar"] - - -def s3_read(bucket, path): - s3 = boto3.client("s3") - return s3.get_object(Bucket=bucket, Key=path)["Body"].read() - - -def s3_read_json(bucket, path): - return json.loads(s3_read(bucket, path)) - - -def test_delete(temp_bucket, tmpdir): - repository = S3Repository(bucket=temp_bucket, root="") - - repository.put("some/file", "nice") - assert repository.get("some/file") == b"nice" - - repository.delete("some/file") - with pytest.raises(DoesNotExistError): - repository.get("some/file") - - -def test_delete_with_root(temp_bucket, tmpdir): - repository = S3Repository(bucket=temp_bucket, root="my-root") - - repository.put("some/file", "nice") - assert repository.get("some/file") == b"nice" - - repository.delete("some/file") - with pytest.raises(DoesNotExistError): - repository.get("some/file") - - -def test_get_put_path_tar(temp_bucket): - with tempfile.TemporaryDirectory() as src: - src_path = Path(src) - for path in ["foo.txt", "bar/baz.txt", "qux.txt"]: - abs_path = src_path / path - abs_path.parent.mkdir(parents=True, exist_ok=True) - with open(abs_path, "w") as f: - f.write("hello " + path) - - repository = S3Repository(bucket=temp_bucket, root="") - repository.put_path_tar(src, "dest.tar.gz", "") - - with tempfile.TemporaryDirectory() as out: - 
repository.get_path_tar("dest.tar.gz", out) - out = Path(out) - assert open(out / "foo.txt").read() == "hello foo.txt" diff --git a/python/tests/test_checkpoint.py b/python/tests/test_checkpoint.py index e33ec723..b9b0375f 100644 --- a/python/tests/test_checkpoint.py +++ b/python/tests/test_checkpoint.py @@ -1,14 +1,15 @@ try: import dataclasses -except ImportError: +except (ImportError, ModuleNotFoundError): from replicate._vendor import dataclasses import datetime +import time import os import pytest +from waiting import wait from replicate.checkpoint import Checkpoint, CheckpointList -from replicate.exceptions import DoesNotExistError -from replicate.experiment import Experiment +from replicate.exceptions import DoesNotExist from replicate.project import Project from tests.factories import experiment_factory, checkpoint_factory @@ -108,6 +109,12 @@ def test_checkout(self, temp_workdir, tmpdir_factory): f.write("bar") chk = exp.checkpoint(path="bar.txt", metrics={"accuracy": "awesome"}) + chk_tar_path = os.path.join(".replicate/checkpoints", chk.id + ".tar.gz") + wait( + lambda: os.path.exists(chk_tar_path), timeout_seconds=5, sleep_seconds=0.01, + ) + time.sleep(0.1) # wait to finish writing + # test with already existing checkpoint tmpdir = tmpdir_factory.mktemp("checkout") chk.checkout(output_directory=str(tmpdir)) @@ -130,12 +137,19 @@ def test_checkout(self, temp_workdir, tmpdir_factory): exp = project.experiments.create(params={"foo": "bar"}, disable_heartbeat=True) chk = exp.checkpoint(metrics={"accuracy": "awesome"}) tmpdir = tmpdir_factory.mktemp("checkout") - with pytest.raises(DoesNotExistError): + with pytest.raises(DoesNotExist): chk.checkout(output_directory=str(tmpdir)) # test experiment with no path exp = project.experiments.create(params={"foo": "bar"}, disable_heartbeat=True) chk = exp.checkpoint(path="bar.txt", metrics={"accuracy": "awesome"}) + + chk_tar_path = os.path.join(".replicate/checkpoints", chk.id + ".tar.gz") + wait( + lambda: 
os.path.exists(chk_tar_path), timeout_seconds=5, sleep_seconds=0.01, + ) + time.sleep(0.1) # wait to finish writing + tmpdir = tmpdir_factory.mktemp("checkout") chk.checkout(output_directory=str(tmpdir)) assert not os.path.exists(tmpdir / "foo.txt") @@ -147,6 +161,13 @@ def test_checkout(self, temp_workdir, tmpdir_factory): path="foo.txt", params={"foo": "bar"}, disable_heartbeat=True ) chk = exp.checkpoint(metrics={"accuracy": "awesome"}) + + exp_tar_path = os.path.join(".replicate/experiments", exp.id + ".tar.gz") + wait( + lambda: os.path.exists(exp_tar_path), timeout_seconds=5, sleep_seconds=0.01, + ) + time.sleep(0.1) # wait to finish writing + tmpdir = tmpdir_factory.mktemp("checkout") chk.checkout(output_directory=str(tmpdir)) assert not os.path.exists(tmpdir / "bar.txt") @@ -168,6 +189,12 @@ def test_open(self, temp_workdir): f.write("bar") chk = exp.checkpoint(path="bar.txt", metrics={"accuracy": "awesome"}) + chk_tar_path = os.path.join(".replicate/checkpoints", chk.id + ".tar.gz") + wait( + lambda: os.path.exists(chk_tar_path), timeout_seconds=5, sleep_seconds=0.01, + ) + time.sleep(0.1) # wait to finish writing + # test with already existing checkpoint assert chk.open("foo.txt").read().decode() == "foo" assert chk.open("bar.txt").read().decode() == "bar" diff --git a/python/tests/test_config.py b/python/tests/test_config.py deleted file mode 100644 index 424a7054..00000000 --- a/python/tests/test_config.py +++ /dev/null @@ -1,50 +0,0 @@ -import pytest # type: ignore -import os - -from replicate.config import ( - load_config, - validate_and_set_defaults, - ConfigValidationError, -) - -from replicate.exceptions import ConfigNotFoundError - - -@pytest.mark.parametrize("config_filename", ["replicate.yaml", "replicate.yml"]) -def test_load_config_blank(tmp_path, config_filename): - config_file = tmp_path / config_filename - config_file.write_text("") - - with pytest.raises(ConfigValidationError): - load_config(tmp_path) - - -def 
test_load_without_config_blank(tmp_path): - with pytest.raises(ConfigNotFoundError): - load_config(tmp_path) - - -def test_validate(): - validate_and_set_defaults({"repository": "s3://foobar"}, "/foo") - with pytest.raises(ConfigValidationError): - validate_and_set_defaults({"invalid": "key"}, "/foo") - with pytest.raises(ConfigValidationError): - validate_and_set_defaults({"repository": 1234}, "/foo") - with pytest.raises(ConfigValidationError): - validate_and_set_defaults( - {"repository": "s3://foobar", "something": "else"}, "/foo" - ) - - assert validate_and_set_defaults({"repository": "s3://foobar"}, "/foo") == { - "repository": "s3://foobar", - } - - -def test_storage_backwards_compatible(): - assert validate_and_set_defaults({"storage": "s3://foobar"}, "/foo") == { - "repository": "s3://foobar", - } - with pytest.raises(ConfigValidationError): - validate_and_set_defaults( - {"storage": "s3://foobar", "repository": "s3://foobar"}, "/foo" - ) diff --git a/python/tests/test_experiment.py b/python/tests/test_experiment.py index 36299a3f..4794a274 100644 --- a/python/tests/test_experiment.py +++ b/python/tests/test_experiment.py @@ -2,27 +2,28 @@ import dataclasses except ImportError: from replicate._vendor import dataclasses +import math import datetime import json import os import pytest # type: ignore import tarfile import tempfile +import time from pathlib import Path from unittest.mock import patch from waiting import wait import replicate from replicate.exceptions import ( - DoesNotExistError, - ConfigNotFoundError, - NewerRepositoryVersion, + DoesNotExist, + ConfigNotFound, + IncompatibleRepositoryVersion, ) from replicate.experiment import Experiment, ExperimentList from replicate.project import Project -from replicate.heartbeat import DEFAULT_REFRESH_INTERVAL -from replicate.constants import HEARTBEAT_MISS_TOLERANCE from replicate.metadata import rfc3339_datetime + from tests.factories import experiment_factory, checkpoint_factory @@ -41,6 +42,14 @@ def 
test_init_and_checkpoint(temp_workdir): path=".", params={"learning_rate": 0.002}, disable_heartbeat=True ) + experiment_tar_path = ".replicate/experiments/{}.tar.gz".format(experiment.id) + wait( + lambda: os.path.exists(experiment_tar_path), + timeout_seconds=5, + sleep_seconds=0.01, + ) + time.sleep(0.1) # wait for file to be written + assert len(experiment.id) == 64 with open(".replicate/metadata/experiments/{}.json".format(experiment.id)) as fh: metadata = json.load(fh) @@ -48,9 +57,7 @@ def test_init_and_checkpoint(temp_workdir): assert metadata["params"] == {"learning_rate": 0.002} with tempfile.TemporaryDirectory() as tmpdir: - with tarfile.open( - ".replicate/experiments/{}.tar.gz".format(experiment.id) - ) as tar: + with tarfile.open(experiment_tar_path) as tar: tar.extractall(tmpdir) assert ( @@ -67,6 +74,14 @@ def test_init_and_checkpoint(temp_workdir): path="weights", step=1, metrics={"validation_loss": 0.123} ) + checkpoint_tar_path = ".replicate/checkpoints/{}.tar.gz".format(checkpoint.id) + wait( + lambda: os.path.exists(checkpoint_tar_path), + timeout_seconds=5, + sleep_seconds=0.01, + ) + time.sleep(0.1) # wait for file to be written + assert len(checkpoint.id) == 64 with open(".replicate/metadata/experiments/{}.json".format(experiment.id)) as fh: metadata = json.load(fh) @@ -77,9 +92,7 @@ def test_init_and_checkpoint(temp_workdir): assert checkpoint_metadata["metrics"] == {"validation_loss": 0.123} with tempfile.TemporaryDirectory() as tmpdir: - with tarfile.open( - ".replicate/checkpoints/{}.tar.gz".format(checkpoint.id) - ) as tar: + with tarfile.open(checkpoint_tar_path) as tar: tar.extractall(tmpdir) assert open(os.path.join(tmpdir, checkpoint.id, "weights")).read() == "1.2kg" @@ -94,10 +107,16 @@ def test_init_and_checkpoint(temp_workdir): path="data", step=1, metrics={"validation_loss": 0.123} ) + checkpoint_tar_path = ".replicate/checkpoints/{}.tar.gz".format(checkpoint.id) + wait( + lambda: os.path.exists(checkpoint_tar_path), + 
timeout_seconds=5, + sleep_seconds=0.01, + ) + time.sleep(0.1) # wait for file to be written + with tempfile.TemporaryDirectory() as tmpdir: - with tarfile.open( - ".replicate/checkpoints/{}.tar.gz".format(checkpoint.id) - ) as tar: + with tarfile.open(checkpoint_tar_path) as tar: tar.extractall(tmpdir) assert ( @@ -109,6 +128,10 @@ def test_init_and_checkpoint(temp_workdir): checkpoint = experiment.checkpoint( path=None, step=1, metrics={"validation_loss": 0.123} ) + + # wait in case async process tries to create a path anyway + time.sleep(0.5) + with open(".replicate/metadata/experiments/{}.json".format(experiment.id)) as fh: metadata = json.load(fh) assert metadata["checkpoints"][-1]["id"] == checkpoint.id @@ -118,10 +141,17 @@ def test_init_and_checkpoint(temp_workdir): experiment = replicate.init( path="train.py", params={"learning_rate": 0.002}, disable_heartbeat=True ) + + experiment_tar_path = ".replicate/experiments/{}.tar.gz".format(experiment.id) + wait( + lambda: os.path.exists(experiment_tar_path), + timeout_seconds=5, + sleep_seconds=0.01, + ) + time.sleep(0.1) # wait for file to be written + with tempfile.TemporaryDirectory() as tmpdir: - with tarfile.open( - ".replicate/experiments/{}.tar.gz".format(experiment.id) - ) as tar: + with tarfile.open(experiment_tar_path) as tar: tar.extractall(tmpdir) assert ( @@ -134,6 +164,10 @@ def test_init_and_checkpoint(temp_workdir): experiment = replicate.init( path=None, params={"learning_rate": 0.002}, disable_heartbeat=True ) + + # wait in case async process tries to create a path anyway + time.sleep(0.5) + with open(".replicate/metadata/experiments/{}.json".format(experiment.id)) as fh: metadata = json.load(fh) assert metadata["id"] == experiment.id @@ -150,49 +184,16 @@ def test_init_with_config_file(temp_workdir): def test_init_without_config_file(temp_workdir): - with pytest.raises(ConfigNotFoundError): + with pytest.raises(ConfigNotFound): replicate.init() -def test_heartbeat(temp_workdir): - with 
open("replicate.yaml", "w") as f: - f.write("repository: file://.replicate/") - - experiment = replicate.init() - heartbeat_path = f".replicate/metadata/heartbeats/{experiment.id}.json" - wait(lambda: os.path.exists(heartbeat_path), timeout_seconds=1, sleep_seconds=0.01) - assert json.load(open(heartbeat_path))["experiment_id"] == experiment.id - experiment.stop() - assert not os.path.exists(heartbeat_path) - - # check starting and stopping immediately doesn't do anything weird - experiment = replicate.init() - experiment.stop() - - -def test_deprecated_repository_backwards_compatible(temp_workdir): - os.makedirs(".replicate/storage") - experiment = replicate.init() - assert isinstance(experiment, Experiment) - assert experiment._project._repository_url == "file://.replicate/storage" - experiment.stop() - - with open("replicate.yaml", "w") as f: - f.write("repository: file://foobar") - experiment = replicate.init() - assert isinstance(experiment, Experiment) - assert experiment._project._repository_url == "file://foobar" - experiment.stop() - - def test_project_repository_version(temp_workdir): with open("replicate.yaml", "w") as f: f.write("repository: file://.replicate") experiment = replicate.init() - expected = """{ - "version": 1 -}""" + expected = """{"version":1}""" with open(".replicate/repository.json") as f: assert f.read() == expected @@ -203,12 +204,8 @@ def test_project_repository_version(temp_workdir): assert f.read() == expected with open(".replicate/repository.json", "w") as f: - f.write( - """{ - "version": 2 -}""" - ) - with pytest.raises(NewerRepositoryVersion): + f.write("""{"version":2}""") + with pytest.raises(IncompatibleRepositoryVersion): replicate.init() @@ -219,38 +216,15 @@ def test_is_running(temp_workdir): experiment = replicate.init() heartbeat_path = f".replicate/metadata/heartbeats/{experiment.id}.json" + assert wait( - lambda: os.path.exists(heartbeat_path), timeout_seconds=2, sleep_seconds=0.01 + lambda: 
os.path.exists(heartbeat_path), timeout_seconds=10, sleep_seconds=0.01 ) # Check whether experiment is running after heartbeats are started assert experiment.is_running() # Heartbeats stopped - experiment._heartbeat.kill() - assert experiment.is_running() - - # Modify heartbeat_metadata to record last heartbeat before last tolerable heartbeat - heartbeat_metadata = json.load(open(heartbeat_path)) - heartbeat_metadata["last_heartbeat"] = rfc3339_datetime( - datetime.datetime.utcnow() - HEARTBEAT_MISS_TOLERANCE * DEFAULT_REFRESH_INTERVAL - ) - - out_file = open(heartbeat_path, "w") - json.dump(heartbeat_metadata, out_file) - out_file.close() - - assert not experiment.is_running() - - # New experiment to test is_running after stop() - experiment = replicate.init() - heartbeat_path = f".replicate/metadata/heartbeats/{experiment.id}.json" - assert wait( - lambda: os.path.exists(heartbeat_path), timeout_seconds=2, sleep_seconds=0.01 - ) - assert experiment.is_running() - - # Check is_running after stopping the experiment experiment.stop() assert not experiment.is_running() @@ -293,37 +267,6 @@ def test_validate(self): in experiment.validate()[0] ) - def test_from_json(self): - data = { - "id": "3132f9288bcc09a6b4d283c95a3968379d6b01fcf5d06500e789f90fdb02b7e1", - "created": "2020-10-07T22:44:06.243914Z", - "params": {"learning_rate": 0.01, "num_epochs": 100}, - "user": "ben", - "host": "", - "command": "train.py", - "config": {"repository": ".replicate/"}, - "path": ".", - "python_version": "3.4.5", - "python_packages": {"foo": "1.0.0"}, - "checkpoints": [], - "replicate_version": "0.0.1", - } - exp = Experiment.from_json(None, data) - assert dataclasses.asdict(exp) == { - "id": "3132f9288bcc09a6b4d283c95a3968379d6b01fcf5d06500e789f90fdb02b7e1", - "created": datetime.datetime(2020, 10, 7, 22, 44, 6, 243914), - "params": {"learning_rate": 0.01, "num_epochs": 100}, - "user": "ben", - "host": "", - "command": "train.py", - "config": {"repository": ".replicate/"}, - "path": 
".", - "python_version": "3.4.5", - "python_packages": {"foo": "1.0.0"}, - "checkpoints": [], - "replicate_version": "0.0.1", - } - def test_checkpoints(self, temp_workdir): project = Project() @@ -378,6 +321,11 @@ def get_paths(): str(p).replace(".replicate/", "") for p in Path(".replicate").rglob("*") ) + chk_tar_path = os.path.join(".replicate/checkpoints", chk.id + ".tar.gz") + wait( + lambda: os.path.exists(chk_tar_path), timeout_seconds=5, sleep_seconds=0.01, + ) + paths = get_paths() expected = set( [ @@ -449,6 +397,42 @@ def test_best_none(self, temp_workdir): ) assert experiment.best() is None + def test_exceptional_values(self, temp_workdir): + project = Project() + + with open("replicate.yaml", "w") as f: + f.write("repository: file://.replicate/") + + experiment = project.experiments.create(disable_heartbeat=True) + experiment.checkpoint( + path=None, + metrics={"accuracy": float("nan")}, + primary_metric=("accuracy", "maximize"), + ) + experiment.checkpoint( + path=None, + metrics={"accuracy": float("-inf")}, + primary_metric=("accuracy", "maximize"), + ) + experiment.checkpoint( + path=None, + metrics={"accuracy": float("+inf")}, + primary_metric=("accuracy", "maximize"), + ) + experiment.checkpoint( + path=None, + metrics={"accuracy": None}, + primary_metric=("accuracy", "maximize"), + ) + + experiment = project.experiments.get(experiment.id) + assert math.isnan(experiment.checkpoints[0].metrics["accuracy"]) + assert math.isinf(experiment.checkpoints[1].metrics["accuracy"]) + assert experiment.checkpoints[1].metrics["accuracy"] < 0 + assert math.isinf(experiment.checkpoints[2].metrics["accuracy"]) + assert experiment.checkpoints[2].metrics["accuracy"] > 0 + assert experiment.checkpoints[3].metrics["accuracy"] is None + class TestExperimentCollection: def test_get(self, temp_workdir): @@ -472,7 +456,7 @@ def test_get(self, temp_workdir): # get by prefix assert project.experiments.get(exp2.id[:7]).created == exp2.created - with 
pytest.raises(DoesNotExistError): + with pytest.raises(DoesNotExist): project.experiments.get("doesnotexist") def test_list(self, temp_workdir): @@ -502,20 +486,22 @@ def test_list(self, temp_workdir): # fmt: off [ # nothing -> bad - (False, False, False, ConfigNotFoundError), + (False, False, False, ConfigNotFound), # has config -> good (False, False, True, None), # has directory but no repo -> bad - (False, True, False, ConfigNotFoundError), + (False, True, False, ConfigNotFound), # has directory but no repo, and config exists -> good (False, True, True, None), - # has repo but no directory -> bad - (True, False, False, ValueError), - (True, False, True, ValueError), # even with config + # has repo but no directory, uses current working directory by default -> good + (True, False, False, None), + + # has repo, no directory, but infers directory from config -> good + (True, False, True, None), # has repo and directory -> good (True, True, False, None), @@ -583,7 +569,7 @@ def test_list_project_options( project = Project(repository=repo, directory=directory) if should_error: - with pytest.raises((ValueError, ConfigNotFoundError)): + with pytest.raises((ValueError, ConfigNotFound)): project.experiments.list() else: exps = project.experiments.list() diff --git a/python/tests/test_heartbeat.py b/python/tests/test_heartbeat.py deleted file mode 100644 index 074c4cf9..00000000 --- a/python/tests/test_heartbeat.py +++ /dev/null @@ -1,72 +0,0 @@ -import time -import json -import os -import datetime -import dateutil.parser -from dateutil.tz import tzutc -import pytest -from waiting import wait - -from replicate.heartbeat import Heartbeat - - -def test_heartbeat_running(tmpdir): - tmpdir = str(tmpdir) - path = "foo/heartbeat.json" - heartbeat = Heartbeat( - "experiment-id-foo", - "file://" + tmpdir, - path, - refresh_interval=datetime.timedelta(seconds=1), - ) - assert not heartbeat.is_alive() - - heartbeat.start() - assert heartbeat.is_alive() - - heartbeat.kill() - 
time.sleep(0.1) - assert not heartbeat.is_alive() - heartbeat.ensure_running() - assert heartbeat.is_alive() - - heartbeat.kill() - - -@pytest.mark.skip(reason="fix blocked on #436") -def test_heartbeat_write(tmpdir): - tmpdir = str(tmpdir) - t1 = datetime.datetime.utcnow().replace(tzinfo=tzutc()) - - path = "foo/heartbeat.json" - heartbeat = Heartbeat( - "experiment-id-foo", - "file://" + tmpdir, - path, - refresh_interval=datetime.timedelta(seconds=0.1), - ) - heartbeat.start() - - heartbeat_path = os.path.join(tmpdir, "foo", "heartbeat.json") - - wait(lambda: os.path.exists(heartbeat_path), timeout_seconds=1, sleep_seconds=0.01) - # sleep a little extra in case the file is created but not yet written - time.sleep(0.01) - - with open(heartbeat_path) as f: - obj = json.loads(f.read()) - last_heartbeat = dateutil.parser.parse(obj["last_heartbeat"]) - - t2 = datetime.datetime.utcnow().replace(tzinfo=tzutc()) - - assert t1 < last_heartbeat < t2 - - time.sleep(0.2) - - with open(heartbeat_path) as f: - obj = json.loads(f.read()) - new_last_heartbeat = dateutil.parser.parse(obj["last_heartbeat"]) - - assert t1 < last_heartbeat < t2 < new_last_heartbeat - - heartbeat.kill() diff --git a/python/tests/test_keras_callback.py b/python/tests/test_keras_callback.py index 3627841c..3e78f394 100644 --- a/python/tests/test_keras_callback.py +++ b/python/tests/test_keras_callback.py @@ -108,5 +108,5 @@ def test_keras_callback_with_no_filepath(temp_workdir): assert len(exp_meta["checkpoints"]) == 5 chkp_meta = exp_meta["checkpoints"][0] - assert chkp_meta["path"] is None + assert chkp_meta["path"] == "" assert not os.path.exists(".replicate/checkpoints/" + chkp_meta["id"] + ".tar.gz") diff --git a/python/tests/test_pb_convert.py b/python/tests/test_pb_convert.py new file mode 100644 index 00000000..179e7fae --- /dev/null +++ b/python/tests/test_pb_convert.py @@ -0,0 +1,201 @@ +import datetime + +from replicate import pb_convert +from replicate.checkpoint import Checkpoint, 
PrimaryMetric, CheckpointList +from replicate.experiment import Experiment +from replicate.servicepb import replicate_pb2 as pb +from replicate.project import Project + + +def full_checkpoint_pb(): + return pb.Checkpoint( + id="foo", + created=pb_convert.timestamp_to_pb( + datetime.datetime(2020, 12, 7, 1, 13, 29, 192682) + ), + path=".", + step=123, + metrics={ + "myint": pb.ParamType(intValue=456), + "myfloat": pb.ParamType(floatValue=7.89), + "mystring": pb.ParamType(stringValue="value"), + "mytrue": pb.ParamType(boolValue=True), + "myfalse": pb.ParamType(boolValue=False), + "mylist": pb.ParamType(objectValueJson="[1, 2, 3]"), + "mymap": pb.ParamType(objectValueJson='{"bar": "baz"}'), + }, + primaryMetric=pb.PrimaryMetric( + name="myfloat", goal=pb.PrimaryMetric.Goal.MAXIMIZE + ), + ) + + +def full_checkpoint(): + return Checkpoint( + id="foo", + created=datetime.datetime(2020, 12, 7, 1, 13, 29, 192682), + path=".", + step=123, + metrics={ + "myint": 456, + "myfloat": 7.89, + "mystring": "value", + "mytrue": True, + "myfalse": False, + "mylist": [1, 2, 3], + "mymap": {"bar": "baz"}, + }, + primary_metric=PrimaryMetric(name="myfloat", goal="maximize"), + ) + + +def empty_checkpoint_pb(): + return pb.Checkpoint( + id="foo", + created=pb_convert.timestamp_to_pb( + datetime.datetime(2020, 12, 7, 1, 13, 29, 192682) + ), + step=0, + ) + + +def empty_checkpoint(): + return Checkpoint( + id="foo", created=datetime.datetime(2020, 12, 7, 1, 13, 29, 192682), step=0, + ) + + +def full_experiment_pb(): + t = datetime.datetime(2020, 12, 7, 1, 13, 29, 192682) + return pb.Experiment( + id="foo", + created=pb_convert.timestamp_to_pb(t), + user="myuser", + host="myhost", + command="mycmd", + config=pb.Config(repository="myrepo", storage=""), + path="mypath", + params={ + "myint": pb.ParamType(intValue=456), + "myfloat": pb.ParamType(floatValue=7.89), + "mystring": pb.ParamType(stringValue="value"), + "mytrue": pb.ParamType(boolValue=True), + "myfalse": 
pb.ParamType(boolValue=False), + "mylist": pb.ParamType(objectValueJson="[1, 2, 3]"), + "mymap": pb.ParamType(objectValueJson='{"bar": "baz"}'), + }, + pythonPackages={"pkg1": "1.1", "pkg2": "2.2"}, + replicateVersion="1.2.3", + checkpoints=[ + pb.Checkpoint( + id="c1", + created=pb_convert.timestamp_to_pb(t + datetime.timedelta(minutes=1)), + step=1, + ), + pb.Checkpoint( + id="c2", + created=pb_convert.timestamp_to_pb(t + datetime.timedelta(minutes=2)), + step=2, + ), + ], + ) + + +def full_experiment(project): + t = datetime.datetime(2020, 12, 7, 1, 13, 29, 192682) + return Experiment( + project=project, + id="foo", + created=t, + user="myuser", + host="myhost", + command="mycmd", + config={"repository": "myrepo", "storage": ""}, + path="mypath", + params={ + "myint": 456, + "myfloat": 7.89, + "mystring": "value", + "mytrue": True, + "myfalse": False, + "mylist": [1, 2, 3], + "mymap": {"bar": "baz"}, + }, + python_packages={"pkg1": "1.1", "pkg2": "2.2"}, + replicate_version="1.2.3", + checkpoints=CheckpointList( + [ + Checkpoint(id="c1", created=t + datetime.timedelta(minutes=1), step=1,), + Checkpoint(id="c2", created=t + datetime.timedelta(minutes=2), step=2,), + ] + ), + ) + + +def empty_experiment_pb(): + return pb.Experiment( + id="foo", + created=pb_convert.timestamp_to_pb( + datetime.datetime(2020, 12, 7, 1, 13, 29, 192682) + ), + ) + + +def empty_experiment(project): + return Experiment( + project=project, + id="foo", + created=datetime.datetime(2020, 12, 7, 1, 13, 29, 192682), + ) + + +def test_checkpoint_from_pb(): + chk_pb = full_checkpoint_pb() + expected = full_checkpoint() + assert pb_convert.checkpoint_from_pb(None, chk_pb) == expected + + +def test_empty_checkpoint_from_pb(): + chk_pb = empty_checkpoint_pb() + expected = empty_checkpoint() + assert pb_convert.checkpoint_from_pb(None, chk_pb) == expected + + +def test_experiment_from_pb(): + exp_pb = full_experiment_pb() + project = Project() + expected = full_experiment(project) + assert 
pb_convert.experiment_from_pb(project, exp_pb) == expected + + +def test_empty_experiment_from_pb(): + exp_pb = empty_experiment_pb() + project = Project() + expected = empty_experiment(project) + assert pb_convert.experiment_from_pb(project, exp_pb) == expected + + +def test_checkpoint_to_pb(): + chk = full_checkpoint() + expected = full_checkpoint_pb() + assert pb_convert.checkpoint_to_pb(chk) == expected + + +def test_empty_checkpoint_to_pb(): + chk = empty_checkpoint() + expected = empty_checkpoint_pb() + assert pb_convert.checkpoint_to_pb(chk) == expected + + +def test_experiment_to_pb(): + project = Project() + exp = full_experiment(project) + expected = full_experiment_pb() + assert pb_convert.experiment_to_pb(exp) == expected + + +def test_empty_experiment_to_pb(): + project = Project() + exp = empty_experiment(project) + expected = empty_experiment_pb() + assert pb_convert.experiment_to_pb(exp) == expected diff --git a/python/tests/test_project.py b/python/tests/test_project.py deleted file mode 100644 index b135bb31..00000000 --- a/python/tests/test_project.py +++ /dev/null @@ -1,122 +0,0 @@ -import os -import tempfile - -import pytest - -from replicate.project import get_project_dir, Project, ProjectSpec -from replicate.exceptions import ConfigNotFoundError, CorruptedProjectSpec - - -@pytest.fixture -def temp_workdir_in_subdir(): - orig_cwd = os.getcwd() - try: - with tempfile.TemporaryDirectory() as tmpdir: - workdir = os.path.join(tmpdir, "foo", "bar") - os.makedirs(workdir) - os.chdir(workdir) - yield - finally: - os.chdir(orig_cwd) - - -@pytest.mark.parametrize("config_filename", ["replicate.yaml", "replicate.yml"]) -def test_get_project_dir(temp_workdir_in_subdir, config_filename): - # use getcwd instead of tempdir from fixture, because on OS X getcwd doesn't return same thing passed to chdir - root = os.path.abspath(os.path.join(os.getcwd(), "../../")) - - # replicate.yaml in current directory - open(os.path.join(root, 
"foo/bar/{}".format(config_filename)), "w").write("") - assert get_project_dir() == os.path.join(root, "foo/bar") - os.unlink(os.path.join(root, "foo/bar/{}".format(config_filename))) - - # up a directory - open(os.path.join(root, "foo/{}".format(config_filename)), "w").write("") - assert get_project_dir() == os.path.join(root, "foo") - os.unlink(os.path.join(root, "foo/{}".format(config_filename))) - - # up two directories - open(os.path.join(root, "{}".format(config_filename)), "w").write("") - assert get_project_dir() == root - os.unlink(os.path.join(root, "{}".format(config_filename))) - - # missing replicate.yaml - with pytest.raises(ConfigNotFoundError): - get_project_dir() - - -def test_load_project_spec(temp_workdir): - with open("replicate.yaml", "w") as f: - f.write("repository: file://.replicate/") - - os.mkdir(".replicate") - with open(".replicate/repository.json", "w") as f: - f.write( - """{ - "version": 1234 -}""" - ) - - project = Project() - assert project._load_project_spec() == ProjectSpec(version=1234) - - -def test_load_missing_project_spec(temp_workdir): - with open("replicate.yaml", "w") as f: - f.write("repository: file://.replicate/") - - project = Project() - assert project._load_project_spec() is None - - -def test_load_corrupted_project_spec(temp_workdir): - with open("replicate.yaml", "w") as f: - f.write("repository: file://.replicate/") - - project = Project() - os.mkdir(".replicate") - - with open(".replicate/repository.json", "w") as f: - f.write( - """{ - "version": asdf -}""" - ) - - with pytest.raises(CorruptedProjectSpec): - project._load_project_spec() - - with open(".replicate/repository.json", "w") as f: - f.write( - """{ - "foo": "bar" -}""" - ) - - with pytest.raises(CorruptedProjectSpec): - project._load_project_spec() - - -def test_write_project_spec(temp_workdir): - with open("replicate.yaml", "w") as f: - f.write("repository: file://.replicate/") - - project = Project() - project._write_project_spec(version=1234) - - 
with open(".replicate/repository.json") as f: - assert ( - f.read() - == """{ - "version": 1234 -}""" - ) - - -def test_write_load_project_spec(temp_workdir): - with open("replicate.yaml", "w") as f: - f.write("repository: file://.replicate/") - - project = Project() - project._write_project_spec(version=1234) - assert project._load_project_spec().version == 1234